test.py
Go to the documentation of this file.
1 # License: Apache 2.0. See LICENSE file in root directory.
2 # Copyright(c) 2021 Intel Corporation. All Rights Reserved.
3 
4 """
5 This module is for formatting and writing unit-tests in python. The general format is as follows
6 1. Use start() to start a test and give it, as an argument, the name of the test
7 2. Use whatever check functions are relevant to test the run
8 3. Use finish() to signal the end of the test
9 4. Repeat stages 1-3 as the number of tests you want to run in the file
10 5. Use print_results_and_exit() to print the number of tests and assertions that passed/failed in the correct format
11  before exiting with 0 if all tests passed or with 1 if there was a failed test
12 
13 In addition you may want to use the 'info' functions in this module to add more detailed
14 messages in case of a failed check
15 """
16 
17 import os, sys, subprocess, traceback, platform
18 
19 from rspy import log
20 
21 n_assertions = 0
22 n_failed_assertions = 0
23 n_tests = 0
24 n_failed_tests = 0
25 test_failed = False
26 test_in_progress = False
27 test_info = {} # Dictionary for holding additional information to print in case of a failed check.
28 
29 
def set_env_vars( env_vars ):
    """
    We want certain environment variables set when we get here. We assume they're not set.

    However, it is impossible to change the current running environment to see them. Instead, we rerun ourselves
    in a child process that inherits the environment we set.

    To do this, we depend on a specific argument in sys.argv that tells us this is the rerun (meaning child
    process). When we see it, we assume the variables are set and don't do anything else.

    For this to work well, the environment variable requirement (set_env_vars call) should appear as one of the
    first lines of the test.

    :param env_vars: A dictionary where the keys are the name of the environment variable and the values are the
                     wanted values in string form (environment variables must be strings)
    """
    if sys.argv[-1] != 'rerun':
        # Parent process: set the variables, then re-execute ourselves so the
        # child interpreter starts with them already in its environment.
        log.d( 'environment variables needed:', env_vars )
        for env_var, val in env_vars.items():
            os.environ[env_var] = val
        cmd = [sys.executable]
        if 'site' not in sys.modules:
            # -S : don't imply 'import site' on initialization
            cmd += ["-S"]
        if sys.flags.verbose:
            # -v : verbose (trace import statements)
            cmd += ["-v"]
        cmd += sys.argv # --debug, or any other args
        cmd += ["rerun"]  # marker argument the child uses to detect the rerun
        log.d( 'running:', cmd )
        # NOTE(review): stderr is captured (PIPE) but never printed here, so the
        # child's stderr output is discarded -- confirm this is intentional.
        p = subprocess.run( cmd, stderr=subprocess.PIPE, universal_newlines=True )
        # Propagate the child's exit code; the parent never returns to the test.
        sys.exit( p.returncode )
    log.d( 'rerun detected' )
    sys.argv = sys.argv[:-1] # Remove the rerun marker so the test sees its real args
64 
65 
67  """
68  :return: the first device that was found, if no device is found the test is skipped. That way we can still run
69  the unit-tests when no device is connected and not fail the tests that check a connected device
70  """
71  import pyrealsense2 as rs
72  c = rs.context()
73  if not c.devices.size(): # if no device is connected we skip the test
74  print("No device found, skipping test")
75  sys.exit( 0 )
76  dev = c.devices[0]
77  log.d( 'found', dev )
78  return dev
79 
80 
82  """
83  :param product_line: The product line of the wanted devices
84  :return: A list of devices of specific product line that was found, if no device is found the test is skipped.
85  That way we can still run the unit-tests when no device is connected
86  and not fail the tests that check a connected device
87  """
88  import pyrealsense2 as rs
89  c = rs.context()
90  devices_list = c.query_devices(product_line)
91  if devices_list.size() == 0:
92  print( "No device of the", product_line, "product line was found; skipping test" )
93  sys.exit( 0 )
94  log.d( 'found', devices_list.size(), product_line, 'devices:', [dev for dev in devices_list] )
95  return devices_list
96 
97 
99  """
100  Function for printing the current call stack. Used when an assertion fails
101  """
102  print( 'Traceback (most recent call last):' )
103  stack = traceback.format_stack()
104  # Avoid stack trace into format_stack():
105  # File "C:/work/git/lrs\unit-tests\py\rspy\test.py", line 124, in check
106  # print_stack()
107  # File "C:/work/git/lrs\unit-tests\py\rspy\test.py", line 87, in print_stack
108  # stack = traceback.format_stack()
109  stack = stack[:-2]
110  for line in reversed( stack ):
111  print( line, end = '' ) # format_stack() adds \n
112 
113 
114 """
115 The following functions are for asserting test cases:
116 The check family of functions tests an expression and continues the test whether the assertion succeeded or failed.
117 The require family are equivalent but execution is aborted if the assertion fails. In this module, the require family
118 is used by sending abort=True to check functions
119 """
120 
121 
123  """
124  Function for when a check fails
125  """
126  global n_failed_assertions, test_failed
127  n_failed_assertions += 1
128  test_failed = True
129  print_info()
130 
131 
def abort():
    """
    Abort the whole test run immediately, exiting with a failure code (1).
    """
    log.e( "Aborting test" )
    sys.exit( 1 )
135 
136 
def check(exp, abort_if_failed = False):
    """
    Basic function for asserting expressions.
    :param exp: An expression to be asserted; a falsy value means the assertion failed
    :param abort_if_failed: If True and the assertion failed, the whole test run is aborted
    :return: True if the assertion passed, False otherwise
    """
    global n_assertions
    n_assertions += 1
    # Success path first: clear transient info and report the pass.
    if exp:
        reset_info()
        return True
    # Failure path: show where we are, record the failure, optionally abort.
    print_stack()
    print( " check failed; received", exp )
    check_failed()
    if abort_if_failed:
        abort()
    return False
155 
156 
def check_equal(result, expected, abort_if_failed = False):
    """
    Used for asserting a variable has the expected value.
    :param result: The actual value of a variable
    :param expected: The expected value of the variable
    :param abort_if_failed: If True and the assertion failed, the whole test run is aborted
    :return: True if the assertion passed, False otherwise
    """
    # Lists get a dedicated checker that reports per-element differences.
    if type(expected) == list:
        print("check_equal should not be used for lists. Use check_equal_lists instead")
        if abort_if_failed:
            abort()
        return False
    global n_assertions
    n_assertions += 1
    if result == expected:
        reset_info()
        return True
    print_stack()
    print( " result :", result )
    print( " expected:", expected )
    check_failed()
    if abort_if_failed:
        abort()
    return False
182 
183 
def unreachable( abort_if_failed = False ):
    """
    Used to assert that a certain section of code (e.g. an if block) is not reached.
    Reaching this call always counts as a failed assertion.
    :param abort_if_failed: If True and this function is reached the test will be aborted
    """
    check(False, abort_if_failed)
190 
191 
193  """
194  Used to assert that an except block is not reached. It's different from unreachable because it expects
195  to be in an except block and prints the stack of the error and not the call-stack for this function
196  """
197  global n_assertions
198  n_assertions += 1
199  traceback.print_exc( file = sys.stdout )
200  check_failed()
201 
202 
def check_equal_lists(result, expected, abort_if_failed = False):
    """
    Used to assert that 2 lists are identical. python "equality" (using ==) requires same length & elements
    but not necessarily same ordering. Here we require exactly the same, including ordering.
    :param result: The actual list
    :param expected: The expected list
    :param abort_if_failed: If True and the assertion failed, the whole test run is aborted
    :return: True if the assertion passed, False otherwise
    """
    global n_assertions
    n_assertions += 1
    failed = False
    if len(result) != len(expected):
        failed = True
        print("Check equal lists failed due to lists of different sizes:")
        print("The resulted list has", len(result), "elements, but the expected list has", len(expected), "elements")
    # Compare position by position; zip stops at the shorter list, which is
    # fine because a length mismatch was already reported above.
    for index, (res, exp) in enumerate( zip( result, expected ) ):
        if res != exp:
            failed = True
            print("Check equal lists failed due to unequal elements:")
            print("The element of index", index, "in both lists was not equal")
    if not failed:
        reset_info()
        return True
    print_stack()
    print( " result list :", result )
    print( " expected list:", expected )
    check_failed()
    if abort_if_failed:
        abort()
    return False
236 
237 
def check_exception(exception, expected_type, expected_msg = None, abort_if_failed = False):
    """
    Used to assert a certain type of exception was raised, placed in the except block.
    :param exception: The exception that was raised
    :param expected_type: The expected type of exception
    :param expected_msg: The expected message in the exception (optional; skipped when None)
    :param abort_if_failed: If True and the assertion failed, the whole test run is aborted
    :return: True if the assertion passed, False otherwise
    """
    # 'problem' holds the print arguments describing the mismatch, or None on success.
    problem = None
    if type(exception) != expected_type:
        problem = [ " raised exception was of type", type(exception), "\n but expected type", expected_type ]
    elif expected_msg and str(exception) != expected_msg:
        problem = [ " exception message:", str(exception), "\n but we expected:", expected_msg ]
    if problem is None:
        reset_info()
        return True
    print_stack()
    print( *problem )
    check_failed()
    if abort_if_failed:
        abort()
    return False
261 
262 
def check_frame_drops(frame, previous_frame_number, allowed_drops = 1):
    """
    Used for checking frame drops while streaming.
    :param frame: Current frame being checked (presumably a pyrealsense2 frame --
                  only get_frame_number() is used here)
    :param previous_frame_number: Number of the previous frame; values <= 0 mean
                  "no previous frame", so no drop check is done
    :param allowed_drops: Maximum number of frame drops we accept
    :return: False if dropped too many frames or frames were out of order, True otherwise
    """
    global test_in_progress
    # Outside a test case this is a no-op that always "passes".
    if not test_in_progress:
        return True
    frame_number = frame.get_frame_number()
    failed = False
    if previous_frame_number > 0:
        # Consecutive frames differ by exactly 1, so this is 0 when nothing was dropped.
        dropped_frames = frame_number - (previous_frame_number + 1)
        if dropped_frames > allowed_drops:
            print( dropped_frames, "frame(s) starting from frame", previous_frame_number + 1, "were dropped" )
            failed = True
        elif dropped_frames < 0:
            # A negative gap means the frame counter went backwards: a repeated
            # or out-of-order frame.
            print( "Frames repeated or out of order. Got frame", frame_number, "after frame",
                   previous_frame_number)
            failed = True
    if failed:
        fail()
        return False
    reset_info()
    return True
290 
291 
293  """
294  Class representing the information stored in test_info dictionary
295  """
296  def __init__(self, value, persistent = False):
297  self.value = value
298  self.persistent = persistent
299 
300 
def info( name, value, persistent = False ):
    """
    This function is used to store additional information to print in case of a failed test. This information is
    erased after the next check. The information is stored in the dictionary test_info, Keys are names (strings)
    and the items are of Information class.
    If information with the given name is already stored it will be replaced.
    :param name: The name of the variable
    :param value: The value this variable stores
    :param persistent: If this parameter is True, the information stored will be kept after the following check
                       and will only be erased at the end of the test (or when reset_info is called with True)
    """
    global test_info
    test_info[name] = Information(value, persistent)
314 
315 
def reset_info(persistent = False):
    """
    Erases the stored information (the test_info dictionary).
    :param persistent: If this parameter is True, even the persistent information will be erased
    """
    global test_info
    if persistent:
        test_info.clear()
    else:
        # Iterate over a snapshot of the keys: popping from the dictionary
        # while iterating it directly raises "RuntimeError: dictionary changed
        # size during iteration" in Python 3.
        for name in list( test_info.keys() ):
            if not test_info[name].persistent:
                test_info.pop(name)
328 
329 
def print_info():
    """
    Print all the additional information registered via info(), then erase the
    non-persistent entries. Called when a check fails.
    """
    global test_info
    if not test_info: # No information is stored
        return
    print("Printing information")
    for name, information in test_info.items():
        print("Name:", name, " value:", information.value)
    reset_info()
338 
339 
def fail():
    """
    Function for manually failing a test in case you want a specific test that does not fit any check function.
    Marks the currently running test as failed; idempotent.
    """
    global test_failed
    # Setting unconditionally is equivalent to the guarded form: the flag only
    # ever transitions to True here.
    test_failed = True
348 
349 
def check_test_in_progress( in_progress = True ):
    """
    Verify the module's test-running state matches the expectation.
    :param in_progress: the expected state (True = a test case should be running)
    :raise RuntimeError: when the actual state differs from the expected one
    """
    global test_in_progress
    if test_in_progress == in_progress:
        return
    if test_in_progress:
        raise RuntimeError( "test case is already running" )
    raise RuntimeError( "no test case is running" )
357 
358 
def start(*test_name):
    """
    Used at the beginning of each test to reset the global variables.
    :param test_name: Any number of arguments that combined give the name of this test
    """
    global n_tests, test_failed, test_in_progress
    n_tests += 1
    test_failed = False
    test_in_progress = True
    # Wipe ALL leftover info (including persistent) from any previous test.
    reset_info( persistent = True )
    print( *test_name )
371 
372 
def finish():
    """
    Used at the end of each test to check if it passed and print the answer.
    Updates the failed-test counter and clears the in-progress flag.
    """
    global test_failed, n_failed_tests, test_in_progress
    if test_failed:
        n_failed_tests += 1
    print( "Test failed" if test_failed else "Test passed" )
    test_in_progress = False
385 
386 
388  """
389  For use only in-between test-cases, this will separate them in some visual way so as
390  to be easier to differentiate.
391  """
392  check_test_in_progress( False )
393  global n_tests
394  if n_tests:
395  print()
396  print( '___' )
397 
398 
400  """
401  Used to print the results of the tests in the file. The format has to agree with the expected format in check_log()
402  in run-unit-tests and with the C++ format using Catch
403  """
405  global n_assertions, n_tests, n_failed_assertions, n_failed_tests
406  if n_failed_tests:
407  passed = n_assertions - n_failed_assertions
408  print("test cases:", n_tests, "|" , n_failed_tests, "failed")
409  print("assertions:", n_assertions, "|", passed, "passed |", n_failed_assertions, "failed")
410  sys.exit(1)
411  print("All tests passed (" + str(n_assertions) + " assertions in " + str(n_tests) + " test cases)")
412  sys.exit(0)
def check_frame_drops(frame, previous_frame_number, allowed_drops=1)
Definition: test.py:263
def find_devices_by_product_line_or_exit(product_line)
Definition: test.py:81
def check_exception(exception, expected_type, expected_msg=None, abort_if_failed=False)
Definition: test.py:238
def print_results_and_exit()
Definition: test.py:399
def find_first_device_or_exit()
Definition: test.py:66
def unexpected_exception()
Definition: test.py:192
def fail()
Definition: test.py:340
def unreachable(abort_if_failed=False)
Definition: test.py:184
def info(name, value, persistent=False)
Definition: test.py:301
def reset_info(persistent=False)
Definition: test.py:316
def print_separator()
Definition: test.py:387
def start(test_name)
Definition: test.py:359
def check(exp, abort_if_failed=False)
Definition: test.py:137
def check_equal_lists(result, expected, abort_if_failed=False)
Definition: test.py:203
def finish()
Definition: test.py:373
def check_failed()
Definition: test.py:122
def print_info()
Definition: test.py:330
static std::string print(const transformation &tf)
def check_equal(result, expected, abort_if_failed=False)
Definition: test.py:157
def set_env_vars(env_vars)
Definition: test.py:30
def abort()
Definition: test.py:132
def __init__(self, value, persistent=False)
Definition: test.py:296
def print_stack()
Definition: test.py:98
def check_test_in_progress(in_progress=True)
Definition: test.py:350


librealsense2
Author(s): Sergey Dorodnicov , Doron Hirshberg , Mark Horn , Reagan Lopez , Itay Carpis
autogenerated on Mon May 3 2021 02:50:11