Commit 58bf53d

Merge pull request #205 from zivid/2025-07-09-update-python-samples
Samples: Automatic updates to public repository
2 parents 0b28cc9 + 1cebba1 commit 58bf53d

File tree

4 files changed: +183 -3 lines changed

README.md

Lines changed: 6 additions & 0 deletions

@@ -123,6 +123,12 @@ from the camera can be used.
     to the ArUco marker on a Zivid Calibration Board.
   - [roi\_box\_via\_checkerboard](https://github.com/zivid/zivid-python-samples/tree/master/source/applications/advanced/roi_box_via_checkerboard.py) - Filter the point cloud based on a ROI box given relative
     to the Zivid Calibration Board.
+  - [stitch\_continuously\_rotating\_object](https://github.com/zivid/zivid-python-samples/tree/master/source/applications/advanced/stitch_continuously_rotating_object.py) - Stitch point clouds from a continuously rotating object
+    without pre-alignment using Local Point Cloud Registration
+    and apply Voxel Downsample.
+  - [stitch\_via\_local\_point\_cloud\_registration](https://github.com/zivid/zivid-python-samples/tree/master/source/applications/advanced/stitch_via_local_point_cloud_registration.py) - Stitch two point clouds using a transformation estimated
+    by Local Point Cloud Registration and apply Voxel
+    Downsample.
   - [transform\_point\_cloud\_from\_millimeters\_to\_meters](https://github.com/zivid/zivid-python-samples/tree/master/source/applications/advanced/transform_point_cloud_from_millimeters_to_meters.py) - Transform point cloud data from millimeters to meters.
   - [transform\_point\_cloud\_via\_aruco\_marker](https://github.com/zivid/zivid-python-samples/tree/master/source/applications/advanced/transform_point_cloud_via_aruco_marker.py) - Transform a point cloud from camera to ArUco marker
     coordinate frame by estimating the marker's pose from the

modules/zividsamples/display.py

Lines changed: 5 additions & 3 deletions

@@ -90,10 +90,12 @@ def copy_to_open3d_point_cloud(
     Returns:
         An Open3D PointCloud
     """
-    xyz = np.nan_to_num(xyz).reshape(-1, 3)
+    if len(np.shape(xyz)) == 3:
+        xyz = np.nan_to_num(xyz).reshape(-1, 3)
     if normals is not None:
         normals = np.nan_to_num(normals).reshape(-1, 3)
-    rgb = rgb.reshape(-1, rgb.shape[-1])[:, :3]
+    if len(np.shape(rgb)) == 3:
+        rgb = rgb.reshape(-1, rgb.shape[-1])[:, :3]

     open3d_point_cloud = o3d.geometry.PointCloud(o3d.utility.Vector3dVector(xyz))
     open3d_point_cloud.colors = o3d.utility.Vector3dVector(rgb / 255)

@@ -119,7 +121,7 @@ def display_open3d_point_cloud(open3d_point_cloud: o3d.geometry.PointCloud) -> None:
     print(" 9: for point cloud colored by normals")
     print(" h: for all controls")
     visualizer.get_render_option().background_color = (0, 0, 0)
-    visualizer.get_render_option().point_size = 1
+    visualizer.get_render_option().point_size = 2
     visualizer.get_render_option().show_coordinate_frame = True
     visualizer.get_view_control().set_front([0, 0, -1])
     visualizer.get_view_control().set_up([0, -1, 0])
source/applications/advanced/stitch_continuously_rotating_object.py

Lines changed: 100 additions & 0 deletions
"""
Stitch point clouds from a continuously rotating object without pre-alignment using Local Point Cloud Registration and apply Voxel Downsample.

It is assumed that the object is rotating around its own axis and the camera is stationary.
The camera settings should have defined a region of interest box that removes unnecessary points, keeping only the object to be stitched.

Note: This example uses experimental SDK features, which may be modified, moved, or deleted in the future without notice.

"""

import argparse
import time
from pathlib import Path

import numpy as np
import zivid
from zivid.experimental.point_cloud_export import export_unorganized_point_cloud
from zivid.experimental.point_cloud_export.file_format import PLY
from zivid.experimental.toolbox.point_cloud_registration import (
    LocalPointCloudRegistrationParameters,
    local_point_cloud_registration,
)
from zividsamples.display import display_pointcloud


def _options() -> argparse.Namespace:
    """Function to read user arguments.

    Returns:
        Arguments from user

    """
    parser = argparse.ArgumentParser(description=__doc__)

    parser.add_argument(
        "--settings-path",
        required=True,
        type=Path,
        help="Path to the camera settings YML file",
    )

    return parser.parse_args()


def _main() -> None:
    user_options = _options()

    app = zivid.Application()

    print("Connecting to camera")
    camera = app.connect_camera()

    settings_file = Path(user_options.settings_path)
    print(f"Loading settings from file: {settings_file}")
    settings = zivid.Settings.load(settings_file)

    previous_to_current_point_cloud_transform = np.eye(4)
    unorganized_stitched_point_cloud = zivid.UnorganizedPointCloud()
    registration_params = LocalPointCloudRegistrationParameters()

    for number_of_captures in range(20):
        time.sleep(1)
        frame = camera.capture_2d_3d(settings)
        unorganized_point_cloud = (
            frame.point_cloud().to_unorganized_point_cloud().voxel_downsampled(voxel_size=1.0, min_points_per_voxel=2)
        )

        if number_of_captures != 0:
            local_point_cloud_registration_result = local_point_cloud_registration(
                target=unorganized_stitched_point_cloud,
                source=unorganized_point_cloud,
                parameters=registration_params,
                initial_transform=previous_to_current_point_cloud_transform,
            )
            if not local_point_cloud_registration_result.converged():
                print("Registration did not converge...")
                continue
            previous_to_current_point_cloud_transform = local_point_cloud_registration_result.transform().to_matrix()

        unorganized_stitched_point_cloud.transform(np.linalg.inv(previous_to_current_point_cloud_transform))
        unorganized_stitched_point_cloud.extend(unorganized_point_cloud)

        print(f"Captures done: {number_of_captures}")

    print("Voxel-downsampling the stitched point cloud")
    unorganized_stitched_point_cloud = unorganized_stitched_point_cloud.voxel_downsampled(
        voxel_size=0.75, min_points_per_voxel=2
    )

    display_pointcloud(
        xyz=unorganized_stitched_point_cloud.copy_data("xyz"),
        rgb=unorganized_stitched_point_cloud.copy_data("rgba")[:, 0:3],
    )

    file_name = Path(__file__).parent / "StitchedPointCloudOfRotatingObject.ply"
    export_unorganized_point_cloud(unorganized_stitched_point_cloud, PLY(str(file_name), layout=PLY.Layout.unordered))


if __name__ == "__main__":
    _main()
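One detail worth noting in the capture loop: rather than transforming each new capture into the stitched cloud's frame, the sample applies the inverse of the estimated transform to the already-stitched cloud and then extends it with the untouched capture, so the accumulated cloud ends up in the newest capture's frame and the previous transform remains a sensible initial guess for the next registration. The short NumPy sketch below, using synthetic points and a random rigid transform (nothing Zivid-specific), shows that the two formulations give the same relative alignment, merely expressed in different reference frames.

import numpy as np

rng = np.random.default_rng(0)

def random_rigid_transform() -> np.ndarray:
    # Build a random 4x4 rigid transform: a rotation from a QR factorization
    # (sign-corrected to determinant +1) plus a random translation.
    q, _ = np.linalg.qr(rng.normal(size=(3, 3)))
    transform = np.eye(4)
    transform[:3, :3] = q * np.sign(np.linalg.det(q))
    transform[:3, 3] = rng.normal(size=3)
    return transform

def apply(transform: np.ndarray, points: np.ndarray) -> np.ndarray:
    # Apply a 4x4 homogeneous transform to an (N, 3) array of points.
    return points @ transform[:3, :3].T + transform[:3, 3]

target = rng.normal(size=(100, 3))  # stand-in for the stitched cloud
source = rng.normal(size=(100, 3))  # stand-in for the newest capture
source_to_target = random_rigid_transform()

# Option A: move the source into the target's frame and append it.
stitched_in_target_frame = np.vstack([target, apply(source_to_target, source)])

# Option B (what the loop above does): move the target into the source's
# frame and append the untouched source.
stitched_in_source_frame = np.vstack([apply(np.linalg.inv(source_to_target), target), source])

# The two results differ only by the choice of reference frame: mapping
# option B back with source_to_target reproduces option A.
assert np.allclose(apply(source_to_target, stitched_in_source_frame), stitched_in_target_frame)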
source/applications/advanced/stitch_via_local_point_cloud_registration.py

Lines changed: 72 additions & 0 deletions
"""
Stitch two point clouds using a transformation estimated by Local Point Cloud Registration and apply Voxel Downsample.

The ZDF files for this sample can be found in Zivid's Sample Data, under the main instructions for Zivid samples.
Zivid's Sample Data can be downloaded from https://support.zivid.com/en/latest/api-reference/samples/sample-data.html.

Note: This example uses experimental SDK features, which may be modified, moved, or deleted in the future without notice.

"""

import zivid
from zivid.experimental.toolbox.point_cloud_registration import (
    LocalPointCloudRegistrationParameters,
    local_point_cloud_registration,
)
from zividsamples.display import display_pointcloud
from zividsamples.paths import get_sample_data_path


def _main() -> None:
    zivid.Application()

    print("Reading point clouds from files")
    directory = get_sample_data_path() / "StitchingPointClouds" / "BlueObject"
    frame_1 = zivid.Frame(directory / "BlueObject.zdf")
    frame_2 = zivid.Frame(directory / "BlueObjectSlightlyMoved.zdf")

    print("Converting organized point clouds to unorganized point clouds and voxel downsampling")
    unorganized_point_cloud_1 = frame_1.point_cloud().to_unorganized_point_cloud()
    unorganized_point_cloud_2 = frame_2.point_cloud().to_unorganized_point_cloud()

    print("Displaying point clouds before stitching")
    unorganized_not_stitched_point_cloud = zivid.UnorganizedPointCloud()
    unorganized_not_stitched_point_cloud.extend(unorganized_point_cloud_1)
    unorganized_not_stitched_point_cloud.extend(unorganized_point_cloud_2)
    display_pointcloud(
        xyz=unorganized_not_stitched_point_cloud.copy_data("xyz"),
        rgb=unorganized_not_stitched_point_cloud.copy_data("rgba")[:, 0:3],
    )

    print("Estimating transformation between point clouds")
    unorganized_point_cloud_1_lpcr = unorganized_point_cloud_1.voxel_downsampled(voxel_size=1.0, min_points_per_voxel=3)
    unorganized_point_cloud_2_lpcr = unorganized_point_cloud_2.voxel_downsampled(voxel_size=1.0, min_points_per_voxel=3)
    registration_params = LocalPointCloudRegistrationParameters()
    local_point_cloud_registration_result = local_point_cloud_registration(
        target=unorganized_point_cloud_1_lpcr, source=unorganized_point_cloud_2_lpcr, parameters=registration_params
    )
    assert local_point_cloud_registration_result.converged(), "Registration did not converge..."
    point_cloud_1_to_point_cloud_2_transform = local_point_cloud_registration_result.transform()

    print("Displaying point clouds after stitching")
    final_point_cloud = zivid.UnorganizedPointCloud()
    final_point_cloud.extend(unorganized_point_cloud_1)
    unorganized_point_cloud_2_transformed = unorganized_point_cloud_2.transformed(
        point_cloud_1_to_point_cloud_2_transform.to_matrix()
    )
    final_point_cloud.extend(unorganized_point_cloud_2_transformed)
    display_pointcloud(
        xyz=final_point_cloud.copy_data("xyz"),
        rgb=final_point_cloud.copy_data("rgba")[:, 0:3],
    )

    print("Voxel-downsampling the stitched point cloud")
    final_point_cloud = final_point_cloud.voxel_downsampled(voxel_size=2.0, min_points_per_voxel=1)
    display_pointcloud(
        xyz=final_point_cloud.copy_data("xyz"),
        rgb=final_point_cloud.copy_data("rgba")[:, 0:3],
    )


if __name__ == "__main__":
    _main()
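Unlike the rotating-object sample, this one only displays the stitched result. If you also want it on disk, the same experimental export API used in stitch_continuously_rotating_object.py above could be appended at the end of _main, with the corresponding imports added at the top of the file; the output file name below is only a placeholder.

from pathlib import Path

from zivid.experimental.point_cloud_export import export_unorganized_point_cloud
from zivid.experimental.point_cloud_export.file_format import PLY

# Placeholder output path next to the script; any writable location works.
file_name = Path(__file__).parent / "StitchedBlueObject.ply"
export_unorganized_point_cloud(final_point_cloud, PLY(str(file_name), layout=PLY.Layout.unordered))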
