Variables

        aligned_stream = rs.align(rs.stream.color)
tuple   bbox = (int(left), int(top), int(width), int(height))
        bottom = detection[6]*H
tuple   bottomLeftCornerOfText = (p1[0], p1[1]+20)
        color_frame = frames.get_color_frame()
        color_image = np.asanyarray(color_frame.get_data())
        config = rs.config()
        depth_frame = frames.get_depth_frame().as_depth_frame()
        depth_image = np.asanyarray(depth_frame.get_data())
        detections = net.forward()
        font = cv2.FONT_HERSHEY_SIMPLEX
tuple   fontColor = (255, 255, 255)
int     fontScale = 1
        frames = pipeline.wait_for_frames()
int     H = 480
        height = bottom-top
string  height_txt = str(height)+"[m]"
        idx = int(detection[1])
        left = detection[3]*W
int     lineType = 2
        my = np.amin(ys, initial=1)
        My = np.amax(ys, initial=-1)
        net = cv2.dnn.readNetFromTensorflow("frozen_inference_graph.pb", "faster_rcnn_inception_v2_coco_2018_01_28.pbtxt")
        obj_points = verts[int(bbox[1]):int(bbox[1] + bbox[3]), int(bbox[0]):int(bbox[0] + bbox[2])].reshape(-1, 3)
tuple   p1 = (int(bbox[0]), int(bbox[1]))
tuple   p2 = (int(bbox[0] + bbox[2]), int(bbox[1] + bbox[3]))
        pipeline = rs.pipeline()
        point_cloud = rs.pointcloud()
        points = point_cloud.calculate(depth_frame)
        right = detection[5]*W
tuple   scaled_size = (int(W), int(H))
        score = float(detection[2])
        top = detection[4]*H
        verts = np.asanyarray(points.get_vertices()).view(np.float32).reshape(-1, W, 3)
int     W = 848
        width = right-left
        ys = obj_points[:,1]
        z = np.median(zs)
        zs = obj_points[:,2]
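
Taken together, these variables implement one capture, detect and measure pass: grab an aligned depth/color pair, build a point cloud, run the Faster R-CNN detector on the color image, then measure the detected object in the vertex map. Below is a minimal sketch of how they are wired up, assuming a connected RealSense camera and the two model files named above in the working directory; the stream formats, the per-frame align call and the blob preprocessing are inferred, not spelled out in the definitions on this page.

    import pyrealsense2 as rs
    import numpy as np
    import cv2

    W, H = 848, 480

    # Depth + color streams at the resolution the detector input uses.
    pipeline = rs.pipeline()
    config = rs.config()
    config.enable_stream(rs.stream.depth, W, H, rs.format.z16, 30)
    config.enable_stream(rs.stream.color, W, H, rs.format.bgr8, 30)
    pipeline.start(config)

    aligned_stream = rs.align(rs.stream.color)   # reproject depth onto the color image
    point_cloud = rs.pointcloud()

    net = cv2.dnn.readNetFromTensorflow("frozen_inference_graph.pb",
                                        "faster_rcnn_inception_v2_coco_2018_01_28.pbtxt")

    # One capture-and-detect pass.
    frames = aligned_stream.process(pipeline.wait_for_frames())
    depth_frame = frames.get_depth_frame().as_depth_frame()
    color_image = np.asanyarray(frames.get_color_frame().get_data())

    # Per-pixel XYZ coordinates (meters) of the aligned depth image.
    points = point_cloud.calculate(depth_frame)
    verts = np.asanyarray(points.get_vertices()).view(np.float32).reshape(-1, W, 3)

    # Run the detector; decoding detections is sketched under the entries below.
    net.setInput(cv2.dnn.blobFromImage(color_image, size=(W, H), swapRB=True, crop=False))
    detections = net.forward()

    pipeline.stop()
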
Variable Documentation

example3 - opencv deploy.aligned_stream = rs.align(rs.stream.color)
Definition at line 18 of file example3 - opencv deploy.py.
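
The align object reprojects each depth frame onto the color sensor's viewpoint, so a pixel in color_image and the same pixel in depth_frame refer to the same point in space. A short sketch of the per-frame use; the process() call is not shown in the definitions on this page and is assumed:

    aligned_stream = rs.align(rs.stream.color)   # created once

    # Applied to every frame set before the individual frames are extracted,
    # so depth and color share one pixel grid.
    frames = pipeline.wait_for_frames()
    frames = aligned_stream.process(frames)
    depth_frame = frames.get_depth_frame().as_depth_frame()
    color_frame = frames.get_color_frame()
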
tuple example3 - opencv deploy.bbox = (int(left), int(top), int(width), int(height))
Definition at line 60 of file example3 - opencv deploy.py.

example3 - opencv deploy.bottom = detection[6]*H
Definition at line 56 of file example3 - opencv deploy.py.

tuple example3 - opencv deploy.bottomLeftCornerOfText = (p1[0], p1[1]+20)
Definition at line 85 of file example3 - opencv deploy.py.

example3 - opencv deploy.color_frame = frames.get_color_frame()
Definition at line 27 of file example3 - opencv deploy.py.

example3 - opencv deploy.color_image = np.asanyarray(color_frame.get_data())
Definition at line 39 of file example3 - opencv deploy.py.

example3 - opencv deploy.config = rs.config()
Definition at line 10 of file example3 - opencv deploy.py.
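
config only takes effect once streams are enabled on it and it is passed to pipeline.start(); those calls are not listed here. A plausible configuration, assuming depth and color at the 848x480 resolution implied by W and H:

    config = rs.config()
    config.enable_stream(rs.stream.depth, W, H, rs.format.z16, 30)   # 16-bit depth
    config.enable_stream(rs.stream.color, W, H, rs.format.bgr8, 30)  # BGR, ready for OpenCV
    profile = pipeline.start(config)
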
example3 - opencv deploy.depth_frame = frames.get_depth_frame().as_depth_frame()
Definition at line 28 of file example3 - opencv deploy.py.

example3 - opencv deploy.depth_image = np.asanyarray(depth_frame.get_data())
Definition at line 34 of file example3 - opencv deploy.py.
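
depth_image is the raw 16-bit depth map (one value per pixel in sensor depth units), while metric distances come from the point cloud below or directly from the frame. A small sketch, using the center pixel as an arbitrary example:

    depth_image = np.asanyarray(depth_frame.get_data())   # dtype uint16, shape (H, W)
    x, y = W // 2, H // 2
    dist_m = depth_frame.get_distance(x, y)                # same pixel, in meters
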
example3 - opencv deploy.detections = net.forward()
Definition at line 43 of file example3 - opencv deploy.py.
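
For this Faster R-CNN graph, net.forward() returns an array of shape (1, 1, N, 7); each row is [image_id, class_id, score, left, top, right, bottom] with the box coordinates normalized to [0, 1], which is where the idx, score, left/top/right/bottom, width, height and bbox entries on this page come from. A sketch of the decoding loop; the 0.5 confidence threshold is an assumption:

    detections = net.forward()            # shape (1, 1, N, 7)
    for detection in detections[0, 0]:
        idx = int(detection[1])           # COCO class id
        score = float(detection[2])       # confidence in [0, 1]
        if score < 0.5:                   # assumed threshold
            continue
        # Normalized corners scaled back to pixel coordinates.
        left   = detection[3] * W
        top    = detection[4] * H
        right  = detection[5] * W
        bottom = detection[6] * H
        width  = right - left
        height = bottom - top
        bbox = (int(left), int(top), int(width), int(height))
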
example3 - opencv deploy.font = cv2.FONT_HERSHEY_SIMPLEX
Definition at line 84 of file example3 - opencv deploy.py.

tuple example3 - opencv deploy.fontColor = (255, 255, 255)
Definition at line 87 of file example3 - opencv deploy.py.

int example3 - opencv deploy.fontScale = 1
Definition at line 86 of file example3 - opencv deploy.py.

example3 - opencv deploy.frames = pipeline.wait_for_frames()
Definition at line 25 of file example3 - opencv deploy.py.

int example3 - opencv deploy.H = 480
Definition at line 6 of file example3 - opencv deploy.py.

example3 - opencv deploy.height = bottom-top
Definition at line 58 of file example3 - opencv deploy.py.

string example3 - opencv deploy.height_txt = str(height)+"[m]"
Definition at line 81 of file example3 - opencv deploy.py.
example3 - opencv deploy.idx = int(detection[1])
Definition at line 49 of file example3 - opencv deploy.py.

example3 - opencv deploy.left = detection[3]*W
Definition at line 53 of file example3 - opencv deploy.py.

int example3 - opencv deploy.lineType = 2
Definition at line 88 of file example3 - opencv deploy.py.
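
font, fontScale, fontColor, lineType and bottomLeftCornerOfText are the usual cv2.putText() parameters for labelling a detection; bottomLeftCornerOfText places the text 20 px below the box corner p1. A sketch of the drawing step; the rectangle style and the use of lineType in the thickness slot are assumptions, since the calls themselves are not listed here:

    font = cv2.FONT_HERSHEY_SIMPLEX
    bottomLeftCornerOfText = (p1[0], p1[1] + 20)
    fontScale = 1
    fontColor = (255, 255, 255)                           # white
    lineType = 2

    cv2.rectangle(color_image, p1, p2, (255, 0, 0), 2)    # assumed box style
    cv2.putText(color_image, height_txt, bottomLeftCornerOfText,
                font, fontScale, fontColor, lineType)
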
example3 - opencv deploy.my = np.amin(ys, initial=1)
Definition at line 75 of file example3 - opencv deploy.py.

example3 - opencv deploy.My = np.amax(ys, initial=-1)
Definition at line 76 of file example3 - opencv deploy.py.

example3 - opencv deploy.net = cv2.dnn.readNetFromTensorflow("frozen_inference_graph.pb", "faster_rcnn_inception_v2_coco_2018_01_28.pbtxt")
Definition at line 23 of file example3 - opencv deploy.py.
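
net is the TensorFlow Faster R-CNN Inception v2 COCO model loaded once through OpenCV's dnn module; frames are fed to it via blobFromImage, and the scaled_size tuple suggests the blob is resized to the stream resolution. A sketch of that preprocessing, with the swapRB and crop settings as assumptions:

    net = cv2.dnn.readNetFromTensorflow("frozen_inference_graph.pb",
                                        "faster_rcnn_inception_v2_coco_2018_01_28.pbtxt")

    scaled_size = (int(W), int(H))
    blob = cv2.dnn.blobFromImage(color_image, size=scaled_size, swapRB=True, crop=False)
    net.setInput(blob)
    detections = net.forward()
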
example3 - opencv deploy.obj_points = verts[int(bbox[1]):int(bbox[1] + bbox[3]), int(bbox[0]):int(bbox[0] + bbox[2])].reshape(-1, 3)
Definition at line 67 of file example3 - opencv deploy.py.
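
obj_points crops the per-pixel vertex map to the detected bounding box and flattens it into a list of XYZ points in meters; the ys, zs, my, My and z entries on this page all operate on it. A sketch of the measurement step, assuming the metric height is taken as the vertical spread My - my and that pixels without depth are dropped first (the definitions show the min/max but not those two steps):

    # XYZ points (meters) inside the detection box, one row per pixel.
    obj_points = verts[int(bbox[1]):int(bbox[1] + bbox[3]),
                       int(bbox[0]):int(bbox[0] + bbox[2])].reshape(-1, 3)

    zs = obj_points[:, 2]
    z = np.median(zs)                  # robust distance to the object, in meters

    ys = obj_points[:, 1]
    ys = ys[np.nonzero(zs)]            # assumed: ignore pixels with no depth
    my = np.amin(ys, initial=1)        # topmost point (the camera y axis points down)
    My = np.amax(ys, initial=-1)       # bottommost point
    height = My - my                   # assumed metric height, in meters
    height_txt = str(height) + "[m]"
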
tuple example3 - opencv deploy.p1 = (int(bbox[0]), int(bbox[1]))
Definition at line 62 of file example3 - opencv deploy.py.

tuple example3 - opencv deploy.p2 = (int(bbox[0] + bbox[2]), int(bbox[1] + bbox[3]))
Definition at line 63 of file example3 - opencv deploy.py.
example3 - opencv deploy.pipeline = rs.pipeline()
Definition at line 9 of file example3 - opencv deploy.py.

example3 - opencv deploy.point_cloud = rs.pointcloud()
Definition at line 19 of file example3 - opencv deploy.py.

example3 - opencv deploy.points = point_cloud.calculate(depth_frame)
Definition at line 30 of file example3 - opencv deploy.py.

example3 - opencv deploy.right = detection[5]*W
Definition at line 55 of file example3 - opencv deploy.py.

tuple example3 - opencv deploy.scaled_size = (int(W), int(H))
Definition at line 41 of file example3 - opencv deploy.py.

example3 - opencv deploy.score = float(detection[2])
Definition at line 48 of file example3 - opencv deploy.py.

example3 - opencv deploy.top = detection[4]*H
Definition at line 54 of file example3 - opencv deploy.py.

example3 - opencv deploy.verts = np.asanyarray(points.get_vertices()).view(np.float32).reshape(-1, W, 3)
Definition at line 31 of file example3 - opencv deploy.py.
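
verts reinterprets the point-cloud vertex buffer as an (H, W, 3) array, so indexing it with an image pixel gives that pixel's 3D position in meters in the camera frame (x right, y down, z forward). For example:

    verts = np.asanyarray(points.get_vertices()).view(np.float32).reshape(-1, W, 3)
    u, v = W // 2, H // 2        # column, row of an arbitrary pixel
    x_m, y_m, z_m = verts[v, u]  # its 3D coordinates; z_m == 0 means no depth there
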
int example3 - opencv deploy.W = 848
Definition at line 5 of file example3 - opencv deploy.py.

example3 - opencv deploy.width = right-left
Definition at line 57 of file example3 - opencv deploy.py.

example3 - opencv deploy.ys = obj_points[:,1]
Definition at line 72 of file example3 - opencv deploy.py.

example3 - opencv deploy.z = np.median(zs)
Definition at line 70 of file example3 - opencv deploy.py.

example3 - opencv deploy.zs = obj_points[:,2]
Definition at line 68 of file example3 - opencv deploy.py.