Skip to content

Commit 5c2b0ca

Browse files
YLouWashU authored and meta-codesync[bot] committed
{Documentation} Add sample data links.
Summary: This diff adds gen2 sample data link to the notebook and tutorials. It also slightly changed how `notebook_show()` is called, it should be at the end of the cell block. Reviewed By: kongchen1992 Differential Revision: D84948289 fbshipit-source-id: b0b91de7015a5882e343f30d34c857ffb261d174
1 parent b59e4f6 commit 5c2b0ca

14 files changed

+122
-47
lines changed

examples/Gen2/python_notebooks/Tutorial_1_vrs_data_provider_basics.ipynb

Lines changed: 12 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,12 @@
2525
"- Retrieve data using either sequential (index-based) or temporal (timestamp-based) access APIs. \n",
2626
"- Learn about timing domains and time query options\n",
2727
"\n",
28-
"**Prerequisites:** Basic Python knowledge and a general understanding of multimodal sensor data."
28+
"**Prerequisites:** \n",
29+
"- Basic Python knowledge and a general understanding of multimodal sensor data.\n",
30+
"- Download Aria Gen2 sample data from [link](https://www.projectaria.com/async/sample/download/?bucket=core&filename=aria_gen2_sample_data_1.vrs)\n",
31+
"\n",
32+
"**Note on Visualization**\n",
33+
"If visualization window is not showing up, this is due to `Rerun` lib's caching issue. Just rerun the specific code cell."
2934
]
3035
},
3136
{
@@ -148,7 +153,6 @@
148153
"# Visualize with Rerun\n",
149154
"import rerun as rr\n",
150155
"rr.init(\"rerun_viz_query_by_index\")\n",
151-
"rr.notebook_show()\n",
152156
"\n",
153157
"# Get number of samples in stream\n",
154158
"num_samples = vrs_data_provider.get_num_data(rgb_stream_id)\n",
@@ -169,7 +173,9 @@
169173
" # Process image data\n",
170174
" if image_data.is_valid():\n",
171175
" rr.set_time_nanos(\"device_time\", timestamp_ns)\n",
172-
" rr.log(\"camera_rgb\", rr.Image(image_data.to_numpy_array()))\n"
176+
" rr.log(\"camera_rgb\", rr.Image(image_data.to_numpy_array()))\n",
177+
"\n",
178+
"rr.notebook_show()"
173179
]
174180
},
175181
{
@@ -231,7 +237,6 @@
231237
"from projectaria_tools.core.sensor_data import TimeDomain, TimeQueryOptions\n",
232238
"\n",
233239
"rr.init(\"rerun_viz_query_by_timestamp\")\n",
234-
"rr.notebook_show()\n",
235240
"\n",
236241
"# Get time bounds for RGB images\n",
237242
"first_timestamp_ns = vrs_data_provider.get_first_time_ns(rgb_stream_id, TimeDomain.DEVICE_TIME)\n",
@@ -270,7 +275,9 @@
270275
" rr.set_time_nanos(\"device_time\", capture_time_ns)\n",
271276
" rr.log(label, rr.Image(image_data.to_numpy_array()))\n",
272277
"\n",
273-
" query_timestamp_ns = query_timestamp_ns + int(1e9) # 1 second\n"
278+
" query_timestamp_ns = query_timestamp_ns + int(1e9) # 1 second\n",
279+
"\n",
280+
"rr.notebook_show()"
274281
]
275282
}
276283
],

examples/Gen2/python_notebooks/Tutorial_2_device_calibration.ipynb

Lines changed: 13 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,11 @@
2525
"- Multi-sensor coordination and sensor poses, and the concept of the \"Device\" frame. \n",
2626
"\n",
2727
"**Pre-requisite:** \n",
28-
"Familiarity with VRS basics from `Tutorial_1_vrs_Data_provider_basics.ipynb`."
28+
"- Familiarity with VRS basics from `Tutorial_1_vrs_Data_provider_basics.ipynb`.\n",
29+
"- Download Aria Gen2 sample data from [link](https://www.projectaria.com/async/sample/download/?bucket=core&filename=aria_gen2_sample_data_1.vrs)\n",
30+
"\n",
31+
"**Note on Visualization**\n",
32+
"If visualization window is not showing up, this is due to `Rerun` lib's caching issue. Just rerun the specific code cell."
2933
]
3034
},
3135
{
@@ -315,7 +319,6 @@
315319
"import rerun as rr\n",
316320
"\n",
317321
"rr.init(\"rerun_viz_image_undistortion\")\n",
318-
"rr.notebook_show()\n",
319322
"\n",
320323
"# We already obtained RGB camera calibration as `camera_alib`.\n",
321324
"# Now, create a linear camera model that is similar to camera_calib\n",
@@ -350,7 +353,8 @@
350353
" dstCalib=linear_camera_model,\n",
351354
" srcCalib=camera_calib,\n",
352355
" )\n",
353-
" rr.log(\"undistorted_camera_rgb\", rr.Image(undistorted_image))"
356+
" rr.log(\"undistorted_camera_rgb\", rr.Image(undistorted_image))\n",
357+
"rr.notebook_show()"
354358
]
355359
},
356360
{
@@ -448,7 +452,6 @@
448452
"\n",
449453
"\n",
450454
"rr.init(\"rerun_viz_imu_rectification\")\n",
451-
"rr.notebook_show()\n",
452455
"\n",
453456
"imu_label = \"imu-right\"\n",
454457
"imu_calib = device_calib.get_imu_calib(imu_label)\n",
@@ -489,7 +492,9 @@
489492
" _plot_imu_signals(imu_data.accel_msec2, imu_data.gyro_radsec, \"imu_right\")\n",
490493
"\n",
491494
" # Plot compensated IMU readings in a separate plot\n",
492-
" _plot_imu_signals(compensated_accel, compensated_gyro, \"imu_right_compensated\")\n"
495+
" _plot_imu_signals(compensated_accel, compensated_gyro, \"imu_right_compensated\")\n",
496+
"\n",
497+
"rr.notebook_show()\n"
493498
]
494499
},
495500
{
@@ -532,7 +537,6 @@
532537
")\n",
533538
"\n",
534539
"rr.init(\"rerun_viz_sensor_extrinsics\")\n",
535-
"rr.notebook_show()\n",
536540
"\n",
537541
"# Obtain a glass outline for visualization. This outline uses factory calibration extrinsics if possible, uses CAD extrinsics if factory calibration is not available.\n",
538542
"glass_outline = AriaGlassesOutline(device_calib, use_cad_calib=False)\n",
@@ -575,7 +579,9 @@
575579
" focal_length=float(camera_calibration.get_focal_lengths()[0]),\n",
576580
" ),\n",
577581
" static=True,\n",
578-
" )\n"
582+
" )\n",
583+
"\n",
584+
"rr.notebook_show()"
579585
]
580586
}
581587
],

examples/Gen2/python_notebooks/Tutorial_3_sequential_access_multi_sensor_data.ipynb

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,11 @@
2020
"4. Apply frame rate subsampling for efficient processing\n",
2121
"\n",
2222
"**Prerequisites**\n",
23-
"- Complete Tutorial 1 (VrsDataProvider Basics) to understand basic data provider concepts"
23+
"- Complete Tutorial 1 (VrsDataProvider Basics) to understand basic data provider concepts\n",
24+
"- Download Aria Gen2 sample data from [link](https://www.projectaria.com/async/sample/download/?bucket=core&filename=aria_gen2_sample_data_1.vrs)\n",
25+
"\n",
26+
"**Note on Visualization**\n",
27+
"If visualization window is not showing up, this is due to `Rerun` lib's caching issue. Just rerun the specific code cell."
2428
]
2529
},
2630
{
@@ -223,7 +227,6 @@
223227
"print(f\"Start visualizing customized sensor data queue\")\n",
224228
"\n",
225229
"rr.init(\"rerun_viz_customized_sensor_data_queue\")\n",
226-
"rr.notebook_show()\n",
227230
"\n",
228231
"for sensor_data in vrs_data_provider.deliver_queued_sensor_data(customized_deliver_options):\n",
229232
" stream_id = sensor_data.stream_id()\n",
@@ -234,7 +237,9 @@
234237
"\n",
235238
" # Visualize\n",
236239
" rr.set_time_nanos(\"device_time\", device_time_ns)\n",
237-
" rr.log(stream_label, rr.Image(image_data_and_record[0].to_numpy_array()))\n"
240+
" rr.log(stream_label, rr.Image(image_data_and_record[0].to_numpy_array()))\n",
241+
"\n",
242+
"rr.notebook_show()"
238243
]
239244
},
240245
{

examples/Gen2/python_notebooks/Tutorial_4_on_device_eyetracking_handtracking.ipynb

Lines changed: 11 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,11 @@
2424
"\n",
2525
"**Prerequisites**\n",
2626
"- Complete Tutorial 1 (VrsDataProvider Basics) to understand basic data provider concepts\n",
27-
"- Complete Tutorial 2 (Device Calibration) to understand how to properly use calibration in Aria data. "
27+
"- Complete Tutorial 2 (Device Calibration) to understand how to properly use calibration in Aria data.\n",
28+
"- Download Aria Gen2 sample data from [link](https://www.projectaria.com/async/sample/download/?bucket=core&filename=aria_gen2_sample_data_1.vrs)\n",
29+
"\n",
30+
"**Note on Visualization**\n",
31+
"If visualization window is not showing up, this is due to `Rerun` lib's caching issue. Just rerun the specific code cell, or restart the python kernel. "
2832
]
2933
},
3034
{
@@ -207,7 +211,6 @@
207211
"rgb_camera_calib = device_calib.get_camera_calib(rgb_camera_label)\n",
208212
"\n",
209213
"rr.init(\"rerun_viz_et_in_cameras\")\n",
210-
"rr.notebook_show()\n",
211214
"\n",
212215
"# Set up a sensor queue with only RGB image + EyeGaze\n",
213216
"deliver_options = vrs_data_provider.get_default_deliver_queued_options()\n",
@@ -249,7 +252,9 @@
249252
"\n",
250253
" # Plot Eyegaze overlay on top of camera images\n",
251254
" rr.set_time_nanos(\"device_time\", device_time_ns)\n",
252-
" plot_eyegaze_in_camera(eyegaze_data = eye_gaze, camera_label = rgb_camera_label, camera_calib = rgb_camera_calib, T_device_cpf = T_device_cpf)\n"
255+
" plot_eyegaze_in_camera(eyegaze_data = eye_gaze, camera_label = rgb_camera_label, camera_calib = rgb_camera_calib, T_device_cpf = T_device_cpf)\n",
256+
"\n",
257+
"rr.notebook_show()"
253258
]
254259
},
255260
{
@@ -536,7 +541,6 @@
536541
"slam_stream_ids = [vrs_data_provider.get_stream_id_from_label(label) for label in slam_camera_labels]\n",
537542
"\n",
538543
"rr.init(\"rerun_viz_ht_in_cameras\")\n",
539-
"rr.notebook_show()\n",
540544
"\n",
541545
"# Set up a sensor queue with only RGB images.\n",
542546
"# Handtracking data will be queried with interpolated API.\n",
@@ -576,7 +580,9 @@
576580
"\n",
577581
"# Wait for rerun to buffer 1 second of data\n",
578582
"import time\n",
579-
"time.sleep(1)"
583+
"time.sleep(1)\n",
584+
"\n",
585+
"rr.notebook_show()"
580586
]
581587
},
582588
{

examples/Gen2/python_notebooks/Tutorial_5_on_device_vio.ipynb

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,12 @@
2323
"\n",
2424
"**Prerequisites**\n",
2525
"- Complete Tutorial 1 (VrsDataProvider Basics) to understand basic data provider concepts\n",
26-
"- Complete Tutorial 2 (Device Calibration) to understand how to properly use calibration in Aria data. "
26+
"- Complete Tutorial 2 (Device Calibration) to understand how to properly use calibration in Aria data.\n",
27+
"- Download Aria Gen2 sample data from [link](https://www.projectaria.com/async/sample/download/?bucket=core&filename=aria_gen2_sample_data_1.vrs)\n",
28+
"\n",
29+
"\n",
30+
"**Note on visualization:**\n",
31+
"If visualization window is not showing up, this is due to `Rerun` lib's caching issue. Just rerun the specific code cell."
2732
]
2833
},
2934
{
@@ -301,7 +306,6 @@
301306
"print(\"\\n=== Visualizing on-device VIO trajectory + HandTracking in 3D view ===\")\n",
302307
"\n",
303308
"rr.init(\"rerun_viz_vio_trajectory\")\n",
304-
"rr.notebook_show()\n",
305309
"\n",
306310
"device_calib = vrs_data_provider.get_device_calibration()\n",
307311
"handtracking_stream_id = vrs_data_provider.get_stream_id_from_label(\"handtracking\")\n",
@@ -388,7 +392,9 @@
388392
" # For visualization purpose, also plot the hand tracking results\n",
389393
" interpolated_hand_pose = vrs_data_provider.get_interpolated_hand_pose_data(handtracking_stream_id, vio_data.capture_timestamp_ns, TimeDomain.DEVICE_TIME)\n",
390394
" if interpolated_hand_pose is not None:\n",
391-
" plot_hand_pose_data_3d(hand_pose_data = interpolated_hand_pose)\n"
395+
" plot_hand_pose_data_3d(hand_pose_data = interpolated_hand_pose)\n",
396+
"\n",
397+
"rr.notebook_show()"
392398
]
393399
},
394400
{

examples/Gen2/python_notebooks/Tutorial_6_timestamp_alignment_in_aria_gen2.ipynb

Lines changed: 13 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,12 @@
2222
"\n",
2323
"**Prerequisites**\n",
2424
"- Complete Tutorial 1 (VrsDataProvider Basics) to understand basic data provider concepts\n",
25-
"- Complete Tutorial 3 (Sequential Access multi-sensor data) to understand how to create a queue of sensor data from VRS file."
25+
"- Complete Tutorial 3 (Sequential Access multi-sensor data) to understand how to create a queue of sensor data from VRS file.\n",
26+
"- Download Aria Gen2 sample data: [host recording](https://www.projectaria.com/async/sample/download/?bucket=core&filename=aria_gen2_sample_data_host_1.vrs) and [client recording](https://www.projectaria.com/async/sample/download/?bucket=core&filename=aria_gen2_sample_data_client_1.vrs)\n",
27+
"\n",
28+
"\n",
29+
"**Note on visualization:**\n",
30+
"If visualization window is not showing up, this is due to `Rerun` lib's caching issue. Just rerun the specific code cell, or restart the Python kernel. "
2631
]
2732
},
2833
{
@@ -164,7 +169,6 @@
164169
"\n",
165170
"print(\"=== Single VRS timestamp-based query visualization examples ===\")\n",
166171
"rr.init(\"rerun_viz_single_vrs_timestamp_based_query\")\n",
167-
"rr.notebook_show()\n",
168172
"\n",
169173
"# Select RGB and SLAM stream IDs to visualize\n",
170174
"all_labels = vrs_data_provider.get_device_calibration().get_camera_labels()\n",
@@ -202,7 +206,9 @@
202206
" rr.log(single_slam_label, rr.Image(slam_image_data.to_numpy_array()))\n",
203207
"\n",
204208
" # Increment query timestamp\n",
205-
" current_timestamp_ns += target_period_ns\n"
209+
" current_timestamp_ns += target_period_ns\n",
210+
"\n",
211+
"rr.notebook_show()"
206212
]
207213
},
208214
{
@@ -279,7 +285,6 @@
279285
"source": [
280286
"print(\"======= Multi-VRS time mapping example: Query APIs ======\")\n",
281287
"rr.init(\"rerun_viz_multi_vrs_time_mapping\")\n",
282-
"rr.notebook_show()\n",
283288
"\n",
284289
"# Set up sensor queue options in host VRS, only turn on RGB stream\n",
285290
"host_deliver_options = host_data_provider.get_default_deliver_queued_options()\n",
@@ -328,7 +333,9 @@
328333
" rr.set_time_nanos(\"device_time\", converted_client_timestamp_ns)\n",
329334
"\n",
330335
" # Plot client image\n",
331-
" rr.log(\"rgb_image_in_client\", rr.Image(client_image_data.to_numpy_array()))"
336+
" rr.log(\"rgb_image_in_client\", rr.Image(client_image_data.to_numpy_array()))\n",
337+
"\n",
338+
"rr.notebook_show()"
332339
]
333340
}
334341
],
@@ -348,7 +355,7 @@
348355
"name": "python",
349356
"nbconvert_exporter": "python",
350357
"pygments_lexer": "ipython3",
351-
"version": "3.12.11"
358+
"version": "3.12.10"
352359
}
353360
},
354361
"nbformat": 4,

examples/Gen2/python_notebooks/Tutorial_7_mps_data_provider_basics.ipynb

Lines changed: 9 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,11 @@
2626
"\n",
2727
"**Prerequisites**\n",
2828
"- Complete Tutorial 1 (VrsDataProvider Basics) to understand basic data provider concepts\n",
29-
"- Complete Tutorial 2 (Device Calibration) to understand how to properly use calibration in Aria data."
29+
"- Complete Tutorial 2 (Device Calibration) to understand how to properly use calibration in Aria data.\n",
30+
"- Download Aria Gen2 sample data: [VRS](https://www.projectaria.com/async/sample/download/?bucket=core&filename=aria_gen2_sample_data_1.vrs) and [MPS output zip file](https://www.projectaria.com/async/sample/download/?bucket=core&filename=aria_gen2_sample_data_1_mps_output.zip).\n",
31+
"\n",
32+
"**Note on Visualization**\n",
33+
"If visualization window is not showing up, this is due to `Rerun` lib's caching issue. Just rerun the specific code cell."
3034
]
3135
},
3236
{
@@ -372,7 +376,6 @@
372376
"\n",
373377
"# Initialize Rerun\n",
374378
"rr.init(\"MPS SLAM Visualization\")\n",
375-
"rr.notebook_show() # open the in-notebook viewer first, then stream logs\n",
376379
"\n",
377380
"# Set up the 3D scene\n",
378381
"rr.log(\"world\", rr.ViewCoordinates.RIGHT_HAND_Z_UP, static=True)\n",
@@ -463,7 +466,9 @@
463466
" radii=5e-3,\n",
464467
" ),\n",
465468
" static=False,\n",
466-
" )"
469+
" )\n",
470+
" \n",
471+
"rr.notebook_show() "
467472
]
468473
}
469474
],
@@ -483,7 +488,7 @@
483488
"name": "python",
484489
"nbconvert_exporter": "python",
485490
"pygments_lexer": "ipython3",
486-
"version": "3.12.11"
491+
"version": "3.12.10"
487492
}
488493
},
489494
"nbformat": 4,

website/docs-research-tools/projectariatools/pythontutorials/calibration.mdx

Lines changed: 10 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,11 @@ Most sensors in Aria glasses are calibrated both extrinsically and intrinsically
2424
- Multi-sensor coordination and sensor poses, and the concept of the "Device" frame.
2525

2626
**Pre-requisite:**
27-
Familiarity with VRS basics from `Tutorial_1_vrs_Data_provider_basics.ipynb`.
27+
- Familiarity with VRS basics from `Tutorial_1_vrs_Data_provider_basics.ipynb`.
28+
- Download Aria Gen2 sample data from [link](https://www.projectaria.com/async/sample/download/?bucket=core&filename=aria_gen2_sample_data_1.vrs)
29+
30+
**Note on Visualization**
31+
If visualization window is not showing up, this is due to `Rerun` lib's caching issue. Just rerun the specific code cell.
2832

2933
### Obtaining Device Calibration Content
3034

@@ -223,7 +227,6 @@ import rerun as rr
223227
from projectaria_tools.core import calibration
224228

225229
rr.init("rerun_viz_image_undistortion")
226-
rr.notebook_show()
227230

228231
# We already obtained RGB camera calibration as `camera_calib`.
229232
# Now, create a linear camera model that is similar to camera_calib
@@ -259,6 +262,8 @@ for i in range(first_few):
259262
srcCalib=camera_calib,
260263
)
261264
rr.log("undistorted_camera_rgb", rr.Image(undistorted_image))
265+
266+
rr.notebook_show()
262267
```
263268

264269
### IMU Intrinsics: Measurement Rectification
@@ -347,7 +352,6 @@ def _plot_imu_signals(accel_data, gyro_data, rerun_plot_label):
347352

348353

349354
rr.init("rerun_viz_imu_rectification")
350-
rr.notebook_show()
351355

352356
imu_label = "imu-right"
353357
imu_calib = device_calib.get_imu_calib(imu_label)
@@ -389,6 +393,8 @@ for i in range(0, first_few, 50):
389393

390394
# Plot compensated IMU readings in a separate plot
391395
_plot_imu_signals(compensated_accel, compensated_gyro, "imu_right_compensated")
396+
397+
rr.notebook_show()
392398
```
393399

394400
### 5. Accessing Sensor Extrinsics
@@ -419,7 +425,6 @@ from projectaria_tools.utils.rerun_helpers import (
419425
)
420426

421427
rr.init("rerun_viz_sensor_extrinsics")
422-
rr.notebook_show()
423428

424429
# Obtain a glass outline for visualization. This outline uses factory calibration extrinsics if possible, uses CAD extrinsics if factory calibration is not available.
425430
glass_outline = AriaGlassesOutline(device_calib, use_cad_calib=False)
@@ -463,4 +468,5 @@ for sensor in sensor_labels:
463468
),
464469
static=True,
465470
)
471+
rr.notebook_show()
466472
```

0 commit comments

Comments (0)