update rerun tutorial
Victorlouisdg committed Feb 12, 2024
1 parent e371e0f commit 13562c3
Showing 1 changed file with 40 additions and 33 deletions.
@@ -6,7 +6,7 @@
"metadata": {},
"source": [
"# Rerun RGBD camera example\n",
"This example shows how to use [rerun](https://www.rerun.io/) for logging images, image annotations, point clouds, transforms between elements in the world, time-series in a way that you might want to do for a robotic manipulation setup. Make sure you have a **ZED2I camera connected**.\n",
"This example shows how to use [rerun](https://www.rerun.io/) for logging images, image annotations, point clouds, transforms between elements in the world, time-series in a way that you might want to do for a robotic manipulation setup. Make sure you have a **Realsense or ZED2I camera connected**.\n",
"\n",
"\n",
"Rerun has more features such as logging meshes, logging 3D bboxes, URDFs (in process). Check the docs to learn more. \n",
@@ -26,7 +26,6 @@
},
"outputs": [],
"source": [
"from airo_camera_toolkit.cameras.zed.zed2i import Zed2i\n",
"import rerun as rr\n",
"#autoreload\n",
"%load_ext autoreload\n",
@@ -41,7 +40,19 @@
"source": [
"# start rerun. If the UI is already running, it will connect to it. Otherwise it will start a new UI and connect to it.\n",
"# you can also start rerun using `python -m rerun`.\n",
"rr.init(\"test\",spawn=True)"
"rr.init(\"test\", spawn=True)"
]
},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {},
+"outputs": [],
+"source": [
+"from airo_camera_toolkit.cameras.realsense.realsense import Realsense\n",
+"\n",
+"realsense = Realsense()\n",
+"camera = realsense"
+]
+},
{
@@ -50,7 +61,10 @@
"metadata": {},
"outputs": [],
"source": [
"zed = Zed2i(depth_mode=Zed2i.NEURAL_DEPTH_MODE, resolution=Zed2i.RESOLUTION_2K)"
"from airo_camera_toolkit.cameras.zed.zed2i import Zed2i\n",
"\n",
"zed = Zed2i(depth_mode=Zed2i.NEURAL_DEPTH_MODE, resolution=Zed2i.RESOLUTION_2K)\n",
"camera = zed"
]
},
{
@@ -59,9 +73,9 @@
"metadata": {},
"outputs": [],
"source": [
"point_cloud = zed.get_colored_point_cloud()\n",
"rgb = zed.get_rgb_image()\n",
"depth = zed.get_depth_image()"
"point_cloud = camera.get_colored_point_cloud()\n",
"rgb = camera.get_rgb_image()\n",
"depth = camera.get_depth_image()"
]
},
{
@@ -71,7 +85,7 @@
"outputs": [],
"source": [
"# log the colored point_cloud to the UI\n",
"rr.log(\"world/camera1/point_cloud\", rr.Points3D(positions=point_cloud.points,colors=point_cloud.colors))\n"
"rr.log(\"world/camera1/point_cloud\", rr.Points3D(positions=point_cloud.points, colors=point_cloud.colors))"
]
},
{
@@ -90,8 +104,8 @@
"outputs": [],
"source": [
"# log the rgb and depth images to the UI in a new image/camera1 space, this will automatically become a 2D image viewer.\n",
"rr.log(\"image/camera1/rgb\",rr.Image(rgb))\n",
"rr.log(\"image/camera1/depth\",rr.Image(depth))"
"rr.log(\"image/camera1/rgb\", rr.Image(rgb))\n",
"rr.log(\"image/camera1/depth\", rr.Image(depth))"
]
},
{
@@ -100,8 +114,8 @@
"metadata": {},
"outputs": [],
"source": [
"# log a dummy 2D bbox \n",
"rr.log(\"image/camera1/rect\", rr.Boxes2D(mins=[20,100], sizes=[300,500]))"
"# log a dummy 2D bbox\n",
"rr.log(\"image/camera1/rect\", rr.Boxes2D(mins=[20, 100], sizes=[300, 500]))"
]
},
{
@@ -111,7 +125,10 @@
"outputs": [],
"source": [
"# log some dummy keypoints and attach labels\n",
"rr.log(\"image/camera1/keypoints\", rr.Points2D([[600,500],[400,500]],keypoint_ids=[0,1],radii=20, labels=[\"keypoint1\",\"keypoint2\"]))"
"rr.log(\n",
" \"image/camera1/keypoints\",\n",
" rr.Points2D([[600, 500], [400, 500]], keypoint_ids=[0, 1], radii=20, labels=[\"keypoint1\", \"keypoint2\"]),\n",
")"
]
},
{
@@ -120,10 +137,10 @@
"metadata": {},
"outputs": [],
"source": [
"# log dummy camera extrinsics from the world space. \n",
"# log dummy camera extrinsics from the world space.\n",
"# This specifies the pose of camera in world.\n",
"translation = [0,0.5,0.7] \n",
"rotation = [-0.707,0,0,0.707] #scalar-last! \n",
"translation = [0, 0.5, 0.7]\n",
"rotation = [-0.707, 0, 0, 0.707] # scalar-last!\n",
"# rr.log_rigid3(\"world/camera1\", parent_from_child=(translation, rotation))\n",
"rr.log(\"world/camera1\", rr.Transform3D(translation=translation, rotation=rotation))"
]
@@ -135,8 +152,8 @@
"outputs": [],
"source": [
"# log the actual camera intrinsics, to create a pinhole camera model in the UI.\n",
"rr.log(\"world/camera1/rgb\", rr.Pinhole(image_from_camera=zed.intrinsics_matrix(), resolution=zed.resolution))\n",
"rr.log(\"world/camera1/rgb\",rr.Image(rgb))"
"rr.log(\"world/camera1/rgb\", rr.Pinhole(image_from_camera=camera.intrinsics_matrix(), resolution=camera.resolution))\n",
"rr.log(\"world/camera1/rgb\", rr.Image(rgb))"
]
},
{
@@ -145,7 +162,7 @@
"metadata": {},
"outputs": [],
"source": [
"# set up the 'view' of the 3D world space. This is for convenience so that rerun can sensible starting orientations for the spaces. \n",
"# set up the 'view' of the 3D world space. This is for convenience so that rerun can sensible starting orientations for the spaces.\n",
"rr.log(\"world\", rr.ViewCoordinates.RIGHT_HAND_Z_UP, timeless=True)\n",
"rr.log(\"world/camera1\", rr.ViewCoordinates.RDF, timeless=True)"
]
@@ -158,8 +175,8 @@
"source": [
"# log some more data\n",
"for _ in range(5):\n",
" point_cloud = zed.get_colored_point_cloud()\n",
" rr.log(\"world/camera1/point_cloud\", rr.Points3D(positions=point_cloud.points,colors=point_cloud.colors))"
" point_cloud = camera.get_colored_point_cloud()\n",
" rr.log(\"world/camera1/point_cloud\", rr.Points3D(positions=point_cloud.points, colors=point_cloud.colors))"
]
},
{
@@ -170,17 +187,7 @@
"source": [
"# log time-series (e.g. F/T sensor)\n",
"for i in range(100):\n",
" rr.log(\"world/robot/force\",rr.TimeSeriesScalar(i))\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# close the camera\n",
"zed.camera.close()"
" rr.log(\"world/robot/force\", rr.TimeSeriesScalar(i))"
]
},
{
@@ -210,7 +217,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.13"
"version": "3.9.18"
},
"vscode": {
"interpreter": {
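For quick reference, the cells touched by this commit compose into the end-to-end flow below. This is a minimal sketch assembled from the notebook source shown in the diff, assuming the airo_camera_toolkit and rerun APIs used there; the use_zed flag is introduced here purely for illustration and is not part of the notebook, which keeps the RealSense and ZED2i setup in separate cells instead.

import rerun as rr

# Start rerun: spawns the viewer, or connects to one that is already running.
rr.init("test", spawn=True)

# Main change in this commit: construct either camera and bind it to `camera`,
# so the rest of the tutorial is camera-agnostic.
use_zed = False  # illustrative flag, not part of the notebook
if use_zed:
    from airo_camera_toolkit.cameras.zed.zed2i import Zed2i

    camera = Zed2i(depth_mode=Zed2i.NEURAL_DEPTH_MODE, resolution=Zed2i.RESOLUTION_2K)
else:
    from airo_camera_toolkit.cameras.realsense.realsense import Realsense

    camera = Realsense()

# Grab one RGB image, one depth image, and a colored point cloud.
rgb = camera.get_rgb_image()
depth = camera.get_depth_image()
point_cloud = camera.get_colored_point_cloud()

# Log the camera pose, its pinhole intrinsics, and the captured data.
rr.log("world/camera1", rr.Transform3D(translation=[0, 0.5, 0.7], rotation=[-0.707, 0, 0, 0.707]))
rr.log("world/camera1/rgb", rr.Pinhole(image_from_camera=camera.intrinsics_matrix(), resolution=camera.resolution))
rr.log("world/camera1/rgb", rr.Image(rgb))
rr.log("image/camera1/rgb", rr.Image(rgb))
rr.log("image/camera1/depth", rr.Image(depth))
rr.log("world/camera1/point_cloud", rr.Points3D(positions=point_cloud.points, colors=point_cloud.colors))

Binding whichever camera is constructed to a single camera variable is what lets the later logging cells stay unchanged regardless of which device is plugged in.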
