ROS Resources: Documentation | Support | Discussion Forum | Index | Service Status | ros @ Robotics Stack Exchange
Ask Your Question

Revision history [back]

click to hide/show revision 1
initial version

I've looked at the code and I've seen that frames are grabbed sequentially but timestamps are taken only after the overall grab operation. I think it would be better to set the left timestamp just after the left frame grab and the right one just after the right grab. This can cause a small overhead because timestamps are also set for discarded frames.

I've looked at the code and I've seen that frames are grabbed sequentially but timestamps are taken only after the overall grab operation. I think it would be better to set the left timestamp just after the left frame grab and the right one just after the right grab. This can cause a small overhead because timestamps are also set for discarded frames.


UPDATE: I'm continuing to study the synchronization error. I've modified the code of the stereo_node so that it takes and publishes the real timestamps of the left and right frame grabs. Let's look at the code of /uvc_camera/src/stereo.cpp

// Publish CameraInfo for both cameras, stamping each message with the single
// capture time supplied by the caller and tagging both with the shared frame id.
void StereoCamera::sendInfo(ros::Time time) {
  // Copy the calibration currently held by the two camera_info managers.
  CameraInfoPtr info_left(new CameraInfo(left_info_mgr.getCameraInfo()));
  CameraInfoPtr info_right(new CameraInfo(right_info_mgr.getCameraInfo()));

  // Both infos get the same stamp so they pair with the image messages
  // published for this capture.
  info_left->header.stamp = time;
  info_right->header.stamp = time;
  info_left->header.frame_id = frame;
  info_right->header.frame_id = frame;

  left_info_pub.publish(info_left);
  right_info_pub.publish(info_right);
}


// Main capture loop: repeatedly grab a left/right frame pair, convert the raw
// buffers into ROS Image messages and publish them together with the matching
// CameraInfo. Runs until `ok` is cleared.
void StereoCamera::feedImages() {
  unsigned int pair_id = 0;  // sequence number shared by both images of a pair
  while (ok) {
    unsigned char *frame_left = NULL, *frame_right = NULL;
    uint32_t bytes_used_left, bytes_used_right;

    // NOTE(review): a single timestamp is taken BEFORE either grab, so it
    // does not reflect the actual per-camera capture instants.
    ros::Time capture_time = ros::Time::now();

    // Grab sequentially: left camera first, then right.
    int left_idx = cam_left->grab(&frame_left, bytes_used_left);
    int right_idx = cam_right->grab(&frame_right, bytes_used_right);

    /* Read in every frame the camera generates, but only send each
     * (skip_frames + 1)th frame. This reduces effective frame
     * rate, processing time and network usage while keeping the
     * images synchronized within the true framerate.
     */
    if (skip_frames == 0 || frames_to_skip == 0) {
      // Publish only when both grabs produced a frame.
      if (frame_left && frame_right) {
    ImagePtr image_left(new Image);
    ImagePtr image_right(new Image);

    // Headers: tightly packed RGB8 data, 3 bytes/pixel, step = 3 * width.
    image_left->height = height;
    image_left->width = width;
    image_left->step = 3 * width;
    image_left->encoding = image_encodings::RGB8;
    image_left->header.stamp = capture_time;
    image_left->header.seq = pair_id;

    image_right->height = height;
    image_right->width = width;
    image_right->step = 3 * width;
    image_right->encoding = image_encodings::RGB8;
    image_right->header.stamp = capture_time;
    image_right->header.seq = pair_id;

    image_left->header.frame_id = frame;
    image_right->header.frame_id = frame;

    image_left->data.resize(image_left->step * image_left->height);
    image_right->data.resize(image_right->step * image_right->height);

    // Copy the raw frame into the message buffer, optionally through
    // rotate() (presumably a 180-degree flip — TODO confirm its semantics).
    if (rotate_left)
      rotate(&image_left->data[0], frame_left, width * height);
    else
      memcpy(&image_left->data[0], frame_left, width * height * 3);

    if (rotate_right)
      rotate(&image_right->data[0], frame_right, width * height);
    else
      memcpy(&image_right->data[0], frame_right, width * height * 3);

    left_pub.publish(image_left);
    right_pub.publish(image_right);

    // CameraInfo is stamped with the same single capture time.
    sendInfo(capture_time);

    ++pair_id;
      }

      // Restart the skip countdown after a (possible) publish.
      frames_to_skip = skip_frames;
    } else {
      frames_to_skip--;
    }

    // Return the grabbed buffers to the driver whether or not we published.
    if (frame_left)
      cam_left->release(left_idx);
    if (frame_right)
      cam_right->release(right_idx);
  }
}

I've modified it to set two different timestamps and I'm getting very strange results. I'm using the following code:

// Publish CameraInfo for both cameras, stamping each message with its own
// per-camera capture time (modified version: left and right stamps differ).
void StereoCamera::sendInfo(ros::Time time_left, ros::Time time_right) {
  // Copy the calibration currently held by the two camera_info managers.
  CameraInfoPtr info_left(new CameraInfo(left_info_mgr.getCameraInfo()));
  CameraInfoPtr info_right(new CameraInfo(right_info_mgr.getCameraInfo()));

  // Each info carries the stamp of its own camera's grab.
  info_left->header.stamp = time_left;
  info_right->header.stamp = time_right;
  info_left->header.frame_id = frame;
  info_right->header.frame_id = frame;

  left_info_pub.publish(info_left);
  right_info_pub.publish(info_right);
}


// Main capture loop (modified version): grabs the left/right pair with a
// separate ros::Time::now() sample taken just before EACH grab, so the two
// published images carry per-camera timestamps instead of one shared stamp.
void StereoCamera::feedImages() {
  unsigned int pair_id = 0;  // sequence number shared by both images of a pair
  while (ok) {
    unsigned char *frame_left = NULL, *frame_right = NULL;
    uint32_t bytes_used_left, bytes_used_right;


    // Timestamp sampled immediately before the left grab.
    ros::Time capture_time_left = ros::Time::now();
    int left_idx = cam_left->grab(&frame_left, bytes_used_left);


    // Timestamp sampled immediately before the right grab; the difference
    // from capture_time_left includes the whole left-grab duration.
    ros::Time capture_time_right = ros::Time::now();
    int right_idx = cam_right->grab(&frame_right, bytes_used_right);

    /* Read in every frame the camera generates, but only send each
     * (skip_frames + 1)th frame. This reduces effective frame
     * rate, processing time and network usage while keeping the
     * images synchronized within the true framerate.
     */
    if (skip_frames == 0 || frames_to_skip == 0) {
      // Publish only when both grabs produced a frame.
      if (frame_left && frame_right) {
    ImagePtr image_left(new Image);
    ImagePtr image_right(new Image);

    // Headers: tightly packed RGB8 data, 3 bytes/pixel, step = 3 * width.
    image_left->height = height;
    image_left->width = width;
    image_left->step = 3 * width;
    image_left->encoding = image_encodings::RGB8;
    image_left->header.stamp = capture_time_left;
    image_left->header.seq = pair_id;

    image_right->height = height;
    image_right->width = width;
    image_right->step = 3 * width;
    image_right->encoding = image_encodings::RGB8;
    image_right->header.stamp = capture_time_right;
    image_right->header.seq = pair_id;

    image_left->header.frame_id = frame;
    image_right->header.frame_id = frame;

    image_left->data.resize(image_left->step * image_left->height);
    image_right->data.resize(image_right->step * image_right->height);

    // Copy the raw frame into the message buffer, optionally through
    // rotate() (presumably a 180-degree flip — TODO confirm its semantics).
    if (rotate_left)
      rotate(&image_left->data[0], frame_left, width * height);
    else
      memcpy(&image_left->data[0], frame_left, width * height * 3);

    if (rotate_right)
      rotate(&image_right->data[0], frame_right, width * height);
    else
      memcpy(&image_right->data[0], frame_right, width * height * 3);

    left_pub.publish(image_left);
    right_pub.publish(image_right);

    // CameraInfo stamped with the matching per-camera times.
    sendInfo(capture_time_left, capture_time_right);

    ++pair_id;
      }

      // Restart the skip countdown after a (possible) publish.
      frames_to_skip = skip_frames;
    } else {
      frames_to_skip--;
    }

    // Return the grabbed buffers to the driver whether or not we published.
    if (frame_left)
      cam_left->release(left_idx);
    if (frame_right)
      cam_right->release(right_idx);
  }
}

The results are very strange: the higher the framerate, the better the frames are synchronized. Moreover, the difference between time_right and time_left corresponds to the left frame grabbing time: with slow framerates the grab time becomes much larger (more than a x2 factor)... it seems like telling the camera to go slowly makes it not hurry up in taking each frame :-) Now I'm going to organize the data and tomorrow I'll write it up here.

I've looked at the code and I've seen that frames are grabbed sequentially but timestamps are taken only after the overall grab operation. I think it would be better to set the left timestamp just after the left frame grab and the right one just after the right grab. This can cause a small overhead because timestamps are also set for discarded frames.


UPDATE: I'm continuing to study the synchronization error. I've modified the code of the stereo_node so that it takes and publishes the real timestamps of the left and right frame grabs. Let's look at the code of /uvc_camera/src/stereo.cpp

// Publish CameraInfo for both cameras, stamping each message with the single
// capture time supplied by the caller and tagging both with the shared frame id.
void StereoCamera::sendInfo(ros::Time time) {
  // Copy the calibration currently held by the two camera_info managers.
  CameraInfoPtr info_left(new CameraInfo(left_info_mgr.getCameraInfo()));
  CameraInfoPtr info_right(new CameraInfo(right_info_mgr.getCameraInfo()));

  // Both infos get the same stamp so they pair with the image messages
  // published for this capture.
  info_left->header.stamp = time;
  info_right->header.stamp = time;
  info_left->header.frame_id = frame;
  info_right->header.frame_id = frame;

  left_info_pub.publish(info_left);
  right_info_pub.publish(info_right);
}


// Main capture loop: repeatedly grab a left/right frame pair, convert the raw
// buffers into ROS Image messages and publish them together with the matching
// CameraInfo. Runs until `ok` is cleared.
void StereoCamera::feedImages() {
  unsigned int pair_id = 0;  // sequence number shared by both images of a pair
  while (ok) {
    unsigned char *frame_left = NULL, *frame_right = NULL;
    uint32_t bytes_used_left, bytes_used_right;

    // NOTE(review): a single timestamp is taken BEFORE either grab, so it
    // does not reflect the actual per-camera capture instants.
    ros::Time capture_time = ros::Time::now();

    // Grab sequentially: left camera first, then right.
    int left_idx = cam_left->grab(&frame_left, bytes_used_left);
    int right_idx = cam_right->grab(&frame_right, bytes_used_right);

    /* Read in every frame the camera generates, but only send each
     * (skip_frames + 1)th frame. This reduces effective frame
     * rate, processing time and network usage while keeping the
     * images synchronized within the true framerate.
     */
    if (skip_frames == 0 || frames_to_skip == 0) {
      // Publish only when both grabs produced a frame.
      if (frame_left && frame_right) {
    ImagePtr image_left(new Image);
    ImagePtr image_right(new Image);

    // Headers: tightly packed RGB8 data, 3 bytes/pixel, step = 3 * width.
    image_left->height = height;
    image_left->width = width;
    image_left->step = 3 * width;
    image_left->encoding = image_encodings::RGB8;
    image_left->header.stamp = capture_time;
    image_left->header.seq = pair_id;

    image_right->height = height;
    image_right->width = width;
    image_right->step = 3 * width;
    image_right->encoding = image_encodings::RGB8;
    image_right->header.stamp = capture_time;
    image_right->header.seq = pair_id;

    image_left->header.frame_id = frame;
    image_right->header.frame_id = frame;

    image_left->data.resize(image_left->step * image_left->height);
    image_right->data.resize(image_right->step * image_right->height);

    // Copy the raw frame into the message buffer, optionally through
    // rotate() (presumably a 180-degree flip — TODO confirm its semantics).
    if (rotate_left)
      rotate(&image_left->data[0], frame_left, width * height);
    else
      memcpy(&image_left->data[0], frame_left, width * height * 3);

    if (rotate_right)
      rotate(&image_right->data[0], frame_right, width * height);
    else
      memcpy(&image_right->data[0], frame_right, width * height * 3);

    left_pub.publish(image_left);
    right_pub.publish(image_right);

    // CameraInfo is stamped with the same single capture time.
    sendInfo(capture_time);

    ++pair_id;
      }

      // Restart the skip countdown after a (possible) publish.
      frames_to_skip = skip_frames;
    } else {
      frames_to_skip--;
    }

    // Return the grabbed buffers to the driver whether or not we published.
    if (frame_left)
      cam_left->release(left_idx);
    if (frame_right)
      cam_right->release(right_idx);
  }
}

I've modified it to set two different timestamps and I'm getting very strange results. I'm using the following code:

// Publish CameraInfo for both cameras, stamping each message with its own
// per-camera capture time (modified version: left and right stamps differ).
void StereoCamera::sendInfo(ros::Time time_left, ros::Time time_right) {
  // Copy the calibration currently held by the two camera_info managers.
  CameraInfoPtr info_left(new CameraInfo(left_info_mgr.getCameraInfo()));
  CameraInfoPtr info_right(new CameraInfo(right_info_mgr.getCameraInfo()));

  // Each info carries the stamp of its own camera's grab.
  info_left->header.stamp = time_left;
  info_right->header.stamp = time_right;
  info_left->header.frame_id = frame;
  info_right->header.frame_id = frame;

  left_info_pub.publish(info_left);
  right_info_pub.publish(info_right);
}


// Main capture loop (modified version): grabs the left/right pair with a
// separate ros::Time::now() sample taken just before EACH grab, so the two
// published images carry per-camera timestamps instead of one shared stamp.
void StereoCamera::feedImages() {
  unsigned int pair_id = 0;  // sequence number shared by both images of a pair
  while (ok) {
    unsigned char *frame_left = NULL, *frame_right = NULL;
    uint32_t bytes_used_left, bytes_used_right;


    // Timestamp sampled immediately before the left grab.
    ros::Time capture_time_left = ros::Time::now();
    int left_idx = cam_left->grab(&frame_left, bytes_used_left);


    // Timestamp sampled immediately before the right grab; the difference
    // from capture_time_left includes the whole left-grab duration.
    ros::Time capture_time_right = ros::Time::now();
    int right_idx = cam_right->grab(&frame_right, bytes_used_right);

    /* Read in every frame the camera generates, but only send each
     * (skip_frames + 1)th frame. This reduces effective frame
     * rate, processing time and network usage while keeping the
     * images synchronized within the true framerate.
     */
    if (skip_frames == 0 || frames_to_skip == 0) {
      // Publish only when both grabs produced a frame.
      if (frame_left && frame_right) {
    ImagePtr image_left(new Image);
    ImagePtr image_right(new Image);

    // Headers: tightly packed RGB8 data, 3 bytes/pixel, step = 3 * width.
    image_left->height = height;
    image_left->width = width;
    image_left->step = 3 * width;
    image_left->encoding = image_encodings::RGB8;
    image_left->header.stamp = capture_time_left;
    image_left->header.seq = pair_id;

    image_right->height = height;
    image_right->width = width;
    image_right->step = 3 * width;
    image_right->encoding = image_encodings::RGB8;
    image_right->header.stamp = capture_time_right;
    image_right->header.seq = pair_id;

    image_left->header.frame_id = frame;
    image_right->header.frame_id = frame;

    image_left->data.resize(image_left->step * image_left->height);
    image_right->data.resize(image_right->step * image_right->height);

    // Copy the raw frame into the message buffer, optionally through
    // rotate() (presumably a 180-degree flip — TODO confirm its semantics).
    if (rotate_left)
      rotate(&image_left->data[0], frame_left, width * height);
    else
      memcpy(&image_left->data[0], frame_left, width * height * 3);

    if (rotate_right)
      rotate(&image_right->data[0], frame_right, width * height);
    else
      memcpy(&image_right->data[0], frame_right, width * height * 3);

    left_pub.publish(image_left);
    right_pub.publish(image_right);

    // CameraInfo stamped with the matching per-camera times.
    sendInfo(capture_time_left, capture_time_right);

    ++pair_id;
      }

      // Restart the skip countdown after a (possible) publish.
      frames_to_skip = skip_frames;
    } else {
      frames_to_skip--;
    }

    // Return the grabbed buffers to the driver whether or not we published.
    if (frame_left)
      cam_left->release(left_idx);
    if (frame_right)
      cam_right->release(right_idx);
  }
}

The results are very strange: the higher the framerate, the better the frames are synchronized. Moreover, the difference between time_right and time_left corresponds to the left frame grabbing time: with slow framerates the grab time becomes much larger (more than a x2 factor)... it seems like telling the camera to go slowly makes it not hurry up in taking each frame :-) Now I'm going to organize the data and tomorrow I'll write it up here.