Tutorial - Using Camera Tracking

This tutorial shows how to get the position and orientation of the camera in real time. The program will loop until 1000 positions are grabbed. We assume that you have followed the previous tutorials.

Getting Started

- First, download the latest version of the ZED SDK.
- Download the Positional Tracking sample code in C++, Python or C#.

Code Overview

Open the camera

As in previous tutorials, we create, configure and open the ZED.

C++

```cpp
// Create a ZED camera object
Camera zed;

// Set configuration parameters
InitParameters init_params;
init_params.camera_resolution = RESOLUTION::HD720; // Use HD720 video mode (default fps: 60)
init_params.coordinate_system = COORDINATE_SYSTEM::RIGHT_HANDED_Y_UP; // Use a right-handed Y-up coordinate system
init_params.coordinate_units = UNIT::METER; // Set units in meters

// Open the camera
ERROR_CODE err = zed.open(init_params);
if (err != ERROR_CODE::SUCCESS)
    exit(-1);
```

Python

```python
# Create a ZED camera object
zed = sl.Camera()

# Set configuration parameters
init_params = sl.InitParameters()
init_params.camera_resolution = sl.RESOLUTION.HD720  # Use HD720 video mode (default fps: 60)
# Use a right-handed Y-up coordinate system
init_params.coordinate_system = sl.COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP
init_params.coordinate_units = sl.UNIT.METER  # Set units in meters

# Open the camera
err = zed.open(init_params)
if err != sl.ERROR_CODE.SUCCESS:
    exit(1)
```

C#

```csharp
// Create a ZED camera object
Camera zed = new Camera(0);

// Set configuration parameters
InitParameters init_params = new InitParameters();
init_params.resolution = RESOLUTION.HD720; // Use HD720 video mode (default fps: 60)
init_params.coordinateSystem = COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP; // Use a right-handed Y-up coordinate system
init_params.coordinateUnits = UNIT.METER; // Set units in meters

// Open the camera
ERROR_CODE err = zed.Open(ref init_params);
if (err != ERROR_CODE.SUCCESS)
    Environment.Exit(-1);
```

Enable positional tracking

Once the camera is opened, we must enable the positional tracking module with enablePositionalTracking() in order to get the position and orientation of the ZED.

C++

```cpp
// Enable positional tracking with default parameters
PositionalTrackingParameters tracking_parameters;
err = zed.enablePositionalTracking(tracking_parameters);
if (err != ERROR_CODE::SUCCESS)
    exit(-1);
```

Python

```python
# Enable positional tracking with default parameters
tracking_parameters = sl.PositionalTrackingParameters()
err = zed.enable_positional_tracking(tracking_parameters)
if err != sl.ERROR_CODE.SUCCESS:
    exit(1)
```

C#

```csharp
// Enable positional tracking with default parameters
PositionalTrackingParameters trackingParameters = new PositionalTrackingParameters();
err = zed.EnablePositionalTracking(ref trackingParameters);
if (err != ERROR_CODE.SUCCESS)
    Environment.Exit(-1);
```

In the above example, we use the default tracking parameters set in the ZED SDK. For the list of available parameters, check the Tracking API docs.
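If the defaults are not a good fit, individual fields of PositionalTrackingParameters can be overridden before enabling the module. The C++ sketch below is only an illustration: the field names used here (enable_area_memory, enable_pose_smoothing, initial_world_transform) come from recent SDK releases and should be verified against the Tracking API docs for your version.

```cpp
// Illustrative only: override a few tracking parameters instead of using the defaults.
// Field names may differ between SDK versions; see the Tracking API docs.
PositionalTrackingParameters tracking_parameters;
tracking_parameters.enable_area_memory = true;    // remember the environment to reduce drift
tracking_parameters.enable_pose_smoothing = true; // smooth out small pose corrections

// Optionally start the world frame one meter above the camera origin
Transform initial_pose;
initial_pose.setTranslation(Translation(0.0f, 1.0f, 0.0f));
tracking_parameters.initial_world_transform = initial_pose;

err = zed.enablePositionalTracking(tracking_parameters);
```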
Capture pose data

Now that motion tracking is enabled, we create a loop to grab and retrieve the camera position. The camera position is given by the class Pose. This class contains the translation and orientation of the camera, as well as the image timestamp and tracking confidence (a sketch showing how to check the tracking state and confidence follows the code below).

A pose is always linked to a reference frame. The SDK provides two reference frames: REFERENCE_FRAME::WORLD and REFERENCE_FRAME::CAMERA. For more information, see the Coordinate Frames section. In this tutorial, we retrieve the camera position in the World Frame.

C++

```cpp
// Track the camera position during 1000 frames
int i = 0;
sl::Pose zed_pose;
while (i < 1000) {
    if (zed.grab() == ERROR_CODE::SUCCESS) {
        // Get the pose of the left eye of the camera with reference to the world frame
        zed.getPosition(zed_pose, REFERENCE_FRAME::WORLD);

        // Display the translation and timestamp
        printf("Translation: Tx: %.3f, Ty: %.3f, Tz: %.3f, Timestamp: %llu\n",
               zed_pose.getTranslation().tx, zed_pose.getTranslation().ty,
               zed_pose.getTranslation().tz, zed_pose.timestamp.getNanoseconds());

        // Display the orientation quaternion
        printf("Orientation: Ox: %.3f, Oy: %.3f, Oz: %.3f, Ow: %.3f\n\n",
               zed_pose.getOrientation().ox, zed_pose.getOrientation().oy,
               zed_pose.getOrientation().oz, zed_pose.getOrientation().ow);

        i++;
    }
}
```

Python

```python
# Track the camera position during 1000 frames
i = 0
zed_pose = sl.Pose()
runtime_parameters = sl.RuntimeParameters()
while i < 1000:
    if zed.grab(runtime_parameters) == sl.ERROR_CODE.SUCCESS:
        # Get the pose of the left eye of the camera with reference to the world frame
        zed.get_position(zed_pose, sl.REFERENCE_FRAME.WORLD)

        # Display the translation and timestamp
        py_translation = sl.Translation()
        tx = round(zed_pose.get_translation(py_translation).get()[0], 3)
        ty = round(zed_pose.get_translation(py_translation).get()[1], 3)
        tz = round(zed_pose.get_translation(py_translation).get()[2], 3)
        print("Translation: Tx: {0}, Ty: {1}, Tz: {2}, Timestamp: {3}\n".format(
            tx, ty, tz, zed_pose.timestamp.get_milliseconds()))

        # Display the orientation quaternion
        py_orientation = sl.Orientation()
        ox = round(zed_pose.get_orientation(py_orientation).get()[0], 3)
        oy = round(zed_pose.get_orientation(py_orientation).get()[1], 3)
        oz = round(zed_pose.get_orientation(py_orientation).get()[2], 3)
        ow = round(zed_pose.get_orientation(py_orientation).get()[3], 3)
        print("Orientation: Ox: {0}, Oy: {1}, Oz: {2}, Ow: {3}\n".format(ox, oy, oz, ow))

        i = i + 1
```

C#

```csharp
// Track the camera position during 1000 frames
int i = 0;
sl.Pose pose = new sl.Pose();
RuntimeParameters runtimeParameters = new RuntimeParameters();
while (i < 1000) {
    if (zed.Grab(ref runtimeParameters) == ERROR_CODE.SUCCESS) {
        // Get the pose of the left eye of the camera with reference to the world frame
        zed.GetPosition(ref pose, REFERENCE_FRAME.WORLD);

        // Display the translation and timestamp
        Console.WriteLine("Translation : " + pose.translation + ", Timestamp : " + pose.timestamp);

        // Display the orientation quaternion
        Console.WriteLine("Rotation : " + pose.rotation);

        i++;
    }
}
```
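As mentioned above, a Pose also carries a tracking confidence, and getPosition() reports the current tracking state. The short C++ sketch below shows one way they could be checked before a pose is used; treat the exact names (POSITIONAL_TRACKING_STATE, pose_confidence) as assumptions to verify against your SDK version.

```cpp
// Illustrative only: check the tracking state and confidence before trusting a pose.
// Names may vary slightly between SDK versions.
if (zed.grab() == ERROR_CODE::SUCCESS) {
    POSITIONAL_TRACKING_STATE state = zed.getPosition(zed_pose, REFERENCE_FRAME::WORLD);
    if (state == POSITIONAL_TRACKING_STATE::OK) {
        // pose_confidence ranges from 0 (low) to 100 (high)
        printf("Pose OK, confidence: %d\n", zed_pose.pose_confidence);
    } else {
        // e.g. SEARCHING while the camera relocalizes after losing tracking
        printf("Tracking not available yet\n");
    }
}
```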
Inertial Data

If an IMU is available (e.g. ZED 2, ZED Mini), the positional tracking module internally fuses visual and inertial data to provide improved position tracking. You can also access the IMU data directly using the code below:

C++

```cpp
SensorsData sensor_data;
if (zed.getSensorsData(sensor_data, TIME_REFERENCE::IMAGE) == ERROR_CODE::SUCCESS) {
    // Get IMU orientation
    auto imu_orientation = sensor_data.imu.pose.getOrientation();
    // Get IMU acceleration
    auto acceleration = sensor_data.imu.linear_acceleration;
    cout << "IMU Orientation: {" << imu_orientation << "}, Acceleration: {" << acceleration << "}\n";
}
```

Python

```python
sensors_data = sl.SensorsData()
zed.get_sensors_data(sensors_data, sl.TIME_REFERENCE.IMAGE)
zed_imu = sensors_data.get_imu_data()

# Get IMU orientation
zed_imu_pose = sl.Transform()
ox = round(zed_imu.get_pose(zed_imu_pose).get_orientation().get()[0], 3)
oy = round(zed_imu.get_pose(zed_imu_pose).get_orientation().get()[1], 3)
oz = round(zed_imu.get_pose(zed_imu_pose).get_orientation().get()[2], 3)
ow = round(zed_imu.get_pose(zed_imu_pose).get_orientation().get()[3], 3)
print("IMU Orientation: Ox: {0}, Oy: {1}, Oz: {2}, Ow: {3}\n".format(ox, oy, oz, ow))

# Get IMU acceleration
acceleration = [0, 0, 0]
zed_imu.get_linear_acceleration(acceleration)
ax = round(acceleration[0], 3)
ay = round(acceleration[1], 3)
az = round(acceleration[2], 3)
print("IMU Acceleration: Ax: {0}, Ay: {1}, Az: {2}\n".format(ax, ay, az))
```

C#

```csharp
SensorsData sensors_data = new SensorsData();
if (zed.GetSensorsData(ref sensors_data, TIME_REFERENCE.CURRENT))
{
    // Get IMU orientation
    Quaternion imu_orientation = sensors_data.imu.fusedOrientation;
    // Get IMU acceleration
    Vector3 acceleration = sensors_data.imu.linearAcceleration;
    Console.WriteLine("IMU Orientation : " + imu_orientation);
    Console.WriteLine("Acceleration : " + acceleration);
}
```

For more information on Camera-IMU and other onboard sensors, check the Sensors section.

Close the Camera

After tracking the camera position for 1000 frames, we disable the tracking module and close the camera.

C++

```cpp
// Disable positional tracking and close the camera
zed.disablePositionalTracking();
zed.close();
return 0;
```

Python

```python
# Disable positional tracking and close the camera
zed.disable_positional_tracking()
zed.close()
```

C#

```csharp
// Disable positional tracking and close the camera
zed.DisablePositionalTracking();
zed.Close();
```

Advanced Example

To learn how to retrieve and display the live position and orientation of the camera in a 3D window, transform pose data, and change coordinate systems and units, check the advanced Motion Tracking sample code.

Next Steps

Read the next tutorials to learn how to use Spatial Mapping and Object Detection.
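For reference, the snippet below condenses the C++ steps of this tutorial (open the camera, enable tracking, grab 1000 poses, disable tracking, close) into a single minimal program. It is a sketch for orientation only and prints just the translation; see the sections above for the full output and the Python and C# variants.

```cpp
// Minimal recap of this tutorial's C++ steps.
#include <sl/Camera.hpp>
#include <cstdio>

int main(int argc, char **argv) {
    sl::Camera zed;

    // Configure and open the camera
    sl::InitParameters init_params;
    init_params.camera_resolution = sl::RESOLUTION::HD720;
    init_params.coordinate_system = sl::COORDINATE_SYSTEM::RIGHT_HANDED_Y_UP;
    init_params.coordinate_units = sl::UNIT::METER;
    if (zed.open(init_params) != sl::ERROR_CODE::SUCCESS) return -1;

    // Enable positional tracking with default parameters
    sl::PositionalTrackingParameters tracking_parameters;
    if (zed.enablePositionalTracking(tracking_parameters) != sl::ERROR_CODE::SUCCESS) return -1;

    // Grab 1000 poses in the world frame and print the translation
    sl::Pose zed_pose;
    int i = 0;
    while (i < 1000) {
        if (zed.grab() == sl::ERROR_CODE::SUCCESS) {
            zed.getPosition(zed_pose, sl::REFERENCE_FRAME::WORLD);
            printf("Tx: %.3f, Ty: %.3f, Tz: %.3f\n",
                   zed_pose.getTranslation().tx,
                   zed_pose.getTranslation().ty,
                   zed_pose.getTranslation().tz);
            i++;
        }
    }

    // Disable positional tracking and close the camera
    zed.disablePositionalTracking();
    zed.close();
    return 0;
}
```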