Using openni::VideoStream. Find below the relevant example source code (with enough of the surrounding code to show where it goes in the main of tracker.cpp). I am also copying the complete file; it is a mess, so note the instructions before the full listing, as you need to change two variables for your machine.

Relevant snippet

// Get Tracker Parameters
if(!pnh.getParam("tf_prefix", tf_prefix)){
    ROS_ERROR("tf_prefix not found on Param Server! Check your launch file!");
    return -1;
}
if(!pnh.getParam("relative_frame", relative_frame)){
    ROS_ERROR("relative_frame not found on Param Server! Check your launch file!");
    return -1;
}

// Initialize OpenNI
if( openni::OpenNI::initialize() != openni::STATUS_OK )
{
    ROS_ERROR("OpenNI initialization error: %s", openni::OpenNI::getExtendedError());
    return -1;
}

// Open Device
openni::Device  devDevice;
if( devDevice.open( openni::ANY_DEVICE ) != openni::STATUS_OK )
{
    ROS_ERROR("Can't Open Device: \n");
    return -1;
}
ROS_INFO("device opened\n");

// NITE Stuff
nite::UserTracker userTracker;
nite::Status niteRc;
nite::NiTE::initialize();
niteRc = userTracker.create(&devDevice);
if (niteRc != nite::STATUS_OK){
    printf("Couldn't create user tracker\n");
    return 3;
}


// Create color stream
openni::VideoStream vsColorStream;

if( vsColorStream.create( devDevice, openni::SENSOR_COLOR ) == openni::STATUS_OK )
{
    // set video mode
    openni::VideoMode mMode;
    mMode.setResolution( 640, 480 );
    mMode.setFps( 30 );
    mMode.setPixelFormat( openni::PIXEL_FORMAT_RGB888 );

    if( vsColorStream.setVideoMode( mMode ) != openni::STATUS_OK )
    {
        ROS_WARN("Can't apply video mode: %s", openni::OpenNI::getExtendedError());
    }

    // image registration
    if( devDevice.isImageRegistrationModeSupported( openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR ) )
    {
        devDevice.setImageRegistrationMode( openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR );
    }
    vsColorStream.setMirroringEnabled(false);
}
else
{
    ROS_ERROR("Can't create color stream on device: ");// << OpenNI::getExtendedError() << endl;
    //cerr <<  "Can't create color stream on device: " << OpenNI::getExtendedError() << endl;
    return -1;
}


// Loop
printf("\nStart moving around to get detected...\n(PSI pose may be required for skeleton calibration, depending on the configuration)\n");

nite::UserTrackerFrameRef userTrackerFrame;

// create OpenCV Window
cv::namedWindow( "User Image",  CV_WINDOW_AUTOSIZE );

// start
vsColorStream.start();

// while (!wasKeyboardHit()){
ros::Rate rate(100.0);
while (nh.ok()){
    niteRc = userTracker.readFrame(&userTrackerFrame);
    if (niteRc != nite::STATUS_OK){
        printf("Get next frame failed\n");
        continue;
    }

    // get depth frame
    openni::VideoFrameRef depthFrame;
    depthFrame = userTrackerFrame.getDepthFrame();

    // get color frame
    openni::VideoFrameRef vfColorFrame;
    cv::Mat mImageBGR;
    if( vsColorStream.readFrame( &vfColorFrame ) == openni::STATUS_OK )
    {
        // convert data to OpenCV format
        const cv::Mat mImageRGB( vfColorFrame.getHeight(), vfColorFrame.getWidth(), CV_8UC3, const_cast<void*>( vfColorFrame.getData() ) );
        // convert from RGB to BGR
        cv::cvtColor( mImageRGB, mImageBGR, CV_RGB2BGR );
        vfColorFrame.release();
    }

    const nite::Array<nite::UserData>& users = userTrackerFrame.getUsers();
    for (int i = 0; i < users.getSize(); ++i) {
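        // ... per-user processing continues here; see the complete file below.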

The complete file follows; it is fairly messy, but you can compare it with yours:

In main you need to change the following two variables, since I am saving the images and the skeleton data to disk: std::string path("/home/yiannis/datasets/recorded/scrap/"); std::string filename_skeleton("/home/yiannis/datasets/recorded/scrap/skeleton.csv");
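If you would rather not hardcode these, here is a minimal sketch of reading them from the parameter server, the same way the tracker reads tf_prefix; note that "record_path" and "skeleton_csv" are made-up parameter names, not something the package defines:

// Hypothetical sketch: load the two paths from the parameter server instead
// of hardcoding them; "record_path" and "skeleton_csv" are invented names,
// so set them in your launch file if you adopt this.
std::string path, filename_skeleton;
pnh.param<std::string>("record_path", path, "/tmp/recorded/");
pnh.param<std::string>("skeleton_csv", filename_skeleton, path + "skeleton.csv");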

Full file

/*********************************************************************
* Software License Agreement (BSD License)
*
* Copyright (c) 2013, Johns Hopkins University
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* * Neither the name of the Johns Hopkins University nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*********************************************************************/

/*
 * Author: Kelleher Guerin, futureneer@gmail.com, Johns Hopkins University
 */

// ROS Dependencies
#include <ros/ros.h>
#include <ros/package.h>
#include <tf/transform_broadcaster.h>
#include <kdl/frames.hpp>
#include <iostream>
#include <sstream>
#include <fstream>
// OpenCV Header
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
// NITE Dependencies
#include "NiTE.h"
#include "NiteSampleUtilities.h"
// Tracker Messages
#include <openni2_tracker/TrackerUser.h>
#include <openni2_tracker/TrackerUserArray.h>


#define SSTR( x ) dynamic_cast< std::ostringstream & >( \
    ( std::ostringstream() << std::dec << x ) ).str()
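// (SSTR converts any streamable value to a std::string; it is used below for CSV fields and image filenames.)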

#define GL_WIN_SIZE_X   1280
#define GL_WIN_SIZE_Y   1024

#define MAX_USERS 10
bool g_visibleUsers[MAX_USERS] = {false};
nite::SkeletonState g_skeletonStates[MAX_USERS] = {nite::SKELETON_NONE};
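// Per-user bookkeeping so updateUserState() below only prints when visibility or skeleton state actually changes.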

#define USER_MESSAGE(msg) \
    {printf("[%08llu] User #%d:\t%s\n",ts, user.getId(),msg);}

typedef std::map<std::string, nite::SkeletonJoint> JointMap;


void updateUserState(const nite::UserData& user, unsigned long long ts){
    if (user.isNew())
        USER_MESSAGE("New")
    else if (user.isVisible() && !g_visibleUsers[user.getId()])
        USER_MESSAGE("Visible")
    else if (!user.isVisible() && g_visibleUsers[user.getId()])
        USER_MESSAGE("Out of Scene")
    else if (user.isLost())
        USER_MESSAGE("Lost")

    g_visibleUsers[user.getId()] = user.isVisible();


    if(g_skeletonStates[user.getId()] != user.getSkeleton().getState())
    {
        switch(g_skeletonStates[user.getId()] = user.getSkeleton().getState())
        {
        case nite::SKELETON_NONE:
            USER_MESSAGE("Stopped tracking.")
            break;
        case nite::SKELETON_CALIBRATING:
            USER_MESSAGE("Calibrating...")
            break;
        case nite::SKELETON_TRACKED:
            USER_MESSAGE("Tracking!")
            break;
        case nite::SKELETON_CALIBRATION_ERROR_NOT_IN_POSE:
        case nite::SKELETON_CALIBRATION_ERROR_HANDS:
        case nite::SKELETON_CALIBRATION_ERROR_LEGS:
        case nite::SKELETON_CALIBRATION_ERROR_HEAD:
        case nite::SKELETON_CALIBRATION_ERROR_TORSO:
            USER_MESSAGE("Calibration Failed... :-|")
            break;
        }
    }
}

bool publishJointTF(ros::NodeHandle& nh, tf::TransformBroadcaster& br, std::string j_name, nite::SkeletonJoint j, std::string tf_prefix, std::string relative_frame, int uid){

    if (j.getPositionConfidence() > 0.0){
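      // NiTE reports joint positions in millimetres; divide by 1000 to get metres for TF.
      // Joint orientation is not used here, so the rotation is published as identity.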
      tf::Transform transform;
      transform.setOrigin(tf::Vector3(j.getPosition().x/1000.0, j.getPosition().y/1000.0, j.getPosition().z/1000.0));
      transform.setRotation(tf::Quaternion(0, 0, 0, 1));
      std::stringstream frame_id_stream;
      std::string frame_id;
      frame_id_stream << "/" << tf_prefix << "/user_" << uid << "/" << j_name;
      frame_id = frame_id_stream.str();
      // std::cout << frame_id << std::endl;
      br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), relative_frame, frame_id));
    }
    return true;
}

int main(int argc, char** argv){
    ROS_INFO("START\n");

    std::string path("/home/yiannis/datasets/recorded/scrap/");
    std::string prefix("img");
    std::string ext(".jpg");
    std::string filename_color;
    std::string filename_skeleton("/home/yiannis/datasets/recorded/scrap/skeleton.csv");
    int g_nXRes = 0, g_nYRes = 0; // used only by the commented-out projection code below
    std::ofstream fp_skeleton;

    ros::init(argc, argv, "openni2_tracker");
    ros::NodeHandle nh;
    ros::NodeHandle pnh("~"); //private node handler
    std::string tf_prefix, relative_frame = "";
    tf::TransformBroadcaster br;
    pnh.setParam("frame_counter", 0);

/* Alternative: open a recorded .oni file instead of the live device:
openni::Status rc = openni::OpenNI::initialize();
openni::Device device;
rc = device.open("file.oni");
nite::UserTracker userTracker;
nite::NiTE::initialize();
niteRc = userTracker.create(&device);
*/


    // Get Tracker Parameters
    if(!pnh.getParam("tf_prefix", tf_prefix)){
        ROS_ERROR("tf_prefix not found on Param Server! Check your launch file!");
        return -1;
    }
    if(!pnh.getParam("relative_frame", relative_frame)){
        ROS_ERROR("relative_frame not found on Param Server! Check your launch file!");
        return -1;
    }

    // Initialize OpenNI
    if( openni::OpenNI::initialize() != openni::STATUS_OK )
    {
        ROS_ERROR("OpenNI initialization error: %s", openni::OpenNI::getExtendedError());
        return -1;
    }

    // Open Device
    openni::Device  devDevice;
    if( devDevice.open( openni::ANY_DEVICE ) != openni::STATUS_OK )
    {
        ROS_ERROR("Can't Open Device: \n");
        return -1;
    }
    ROS_INFO("device opened\n");

    // Enable depth-to-color registration and depth/color sync
    devDevice.setImageRegistrationMode(openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR);
    devDevice.setDepthColorSyncEnabled(true);

    // NITE Stuff
    nite::UserTracker userTracker;
    nite::Status niteRc;
    nite::NiTE::initialize();
    niteRc = userTracker.create(&devDevice);
    if (niteRc != nite::STATUS_OK){
        printf("Couldn't create user tracker\n");
        return 3;
    }


    // Create color stream
    openni::VideoStream vsColorStream;

    if( vsColorStream.create( devDevice, openni::SENSOR_COLOR ) == openni::STATUS_OK )
    {
        // set video mode
        openni::VideoMode mMode;
        mMode.setResolution( 640, 480 );
        mMode.setFps( 30 );
        mMode.setPixelFormat( openni::PIXEL_FORMAT_RGB888 );

        if( vsColorStream.setVideoMode( mMode ) != openni::STATUS_OK )
        {
            ROS_WARN("Can't apply video mode: %s", openni::OpenNI::getExtendedError());
        }

        // image registration
        if( devDevice.isImageRegistrationModeSupported( openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR ) )
        {
            devDevice.setImageRegistrationMode( openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR );
        }
        vsColorStream.setMirroringEnabled(false);
    }
    else
    {
        ROS_ERROR("Can't create color stream on device: ");// << OpenNI::getExtendedError() << endl;
        //cerr <<  "Can't create color stream on device: " << OpenNI::getExtendedError() << endl;
        return -1;
    }


    // Loop
    printf("\nStart moving around to get detected...\n(PSI pose may be required for skeleton calibration, depending on the configuration)\n");

    nite::UserTrackerFrameRef userTrackerFrame;

    // create OpenCV Window
    cv::namedWindow( "User Image",  CV_WINDOW_AUTOSIZE );

    // start
    vsColorStream.start();

    // while (!wasKeyboardHit()){
    ros::Rate rate(100.0);
    while (nh.ok()){
        niteRc = userTracker.readFrame(&userTrackerFrame);
        if (niteRc != nite::STATUS_OK){
            printf("Get next frame failed\n");
            continue;
        }

        // get depth frame
        openni::VideoFrameRef depthFrame;
        depthFrame = userTrackerFrame.getDepthFrame();
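        // Note: depthFrame is only needed by the commented-out joint-to-depth projection further down.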

        // get color frame
        openni::VideoFrameRef vfColorFrame;
        cv::Mat mImageBGR;
        if( vsColorStream.readFrame( &vfColorFrame ) == openni::STATUS_OK )
        {
            // convert data to OpenCV format
            const cv::Mat mImageRGB( vfColorFrame.getHeight(), vfColorFrame.getWidth(), CV_8UC3, const_cast<void*>( vfColorFrame.getData() ) );
            // convert from RGB to BGR
            cv::cvtColor( mImageRGB, mImageBGR, CV_RGB2BGR );
            vfColorFrame.release();
        }

        const nite::Array<nite::UserData>& users = userTrackerFrame.getUsers();
        for (int i = 0; i < users.getSize(); ++i) {
            if (i > 0)
                ROS_ERROR("Handling more than one skeleton is not implemented\n");
            const nite::UserData& user = users[i];
            updateUserState(user,userTrackerFrame.getTimestamp());
            if (user.isNew()){
                userTracker.startSkeletonTracking(user.getId());
            } else if (user.getSkeleton().getState() == nite::SKELETON_TRACKED){
                pnh.getParam("frame_counter", i);

                std::string line;
                line = SSTR(i) + "," + SSTR(user.getId());

                JointMap named_joints;              
                named_joints["head"] = (user.getSkeleton().getJoint(nite::JOINT_HEAD) );
                named_joints["neck"] = (user.getSkeleton().getJoint(nite::JOINT_NECK) );
                named_joints["left_shoulder"] = (user.getSkeleton().getJoint(nite::JOINT_LEFT_SHOULDER) );
                named_joints["right_shoulder"] = (user.getSkeleton().getJoint(nite::JOINT_RIGHT_SHOULDER) );
                named_joints["left_elbow"] = (user.getSkeleton().getJoint(nite::JOINT_LEFT_ELBOW) );
                named_joints["right_elbow"] = (user.getSkeleton().getJoint(nite::JOINT_RIGHT_ELBOW) );
                named_joints["left_hand"] = (user.getSkeleton().getJoint(nite::JOINT_LEFT_HAND) );
                named_joints["right_hand"] = (user.getSkeleton().getJoint(nite::JOINT_RIGHT_HAND) );
                named_joints["torso"] = (user.getSkeleton().getJoint(nite::JOINT_TORSO) );
                named_joints["left_hip"] = (user.getSkeleton().getJoint(nite::JOINT_LEFT_HIP) );
                named_joints["right_hip"] = (user.getSkeleton().getJoint(nite::JOINT_RIGHT_HIP) );
                named_joints["left_knee"] = (user.getSkeleton().getJoint(nite::JOINT_LEFT_KNEE) );
                named_joints["right_knee"] = (user.getSkeleton().getJoint(nite::JOINT_RIGHT_KNEE) );
                named_joints["left_foot"] = (user.getSkeleton().getJoint(nite::JOINT_LEFT_FOOT) );
                named_joints["right_foot"] = (user.getSkeleton().getJoint(nite::JOINT_RIGHT_FOOT) );

                for (JointMap::iterator it=named_joints.begin(); it!=named_joints.end(); ++it){
                    line += "," + SSTR(it->second.getPosition().x) + "," + SSTR(it->second.getPosition().y) +"," + SSTR(it->second.getPosition().z);
                    //printf("%s\n", it->first.c_str());
                    publishJointTF(nh,br,it->first,it->second,tf_prefix,relative_frame,user.getId());
                }
                //line = SSTR(named_joints["left_hand"].getPosition().x) + "," + SSTR(named_joints["left_hand"].getPosition().y) + "," + SSTR(named_joints["left_hand"].getPosition().z);
                /*
                std::string foo = "right_hand";
                float coordinates[2] = {0};
                userTracker.convertJointCoordinatesToDepth(named_joints[foo.c_str()].getPosition().x, named_joints[foo.c_str()].getPosition().y, named_joints[foo.c_str()].getPosition().z, &coordinates[0], &coordinates[1]);

                g_nXRes = depthFrame.getVideoMode().getResolutionX();
                g_nYRes = depthFrame.getVideoMode().getResolutionY();
                //printf("%f %f %d %d\n", coordinates[0], coordinates[1], g_nXRes, g_nYRes);
                //nite::SkeletonJoint joint1 = userData.getSkeleton().getJoint(nite::JOINT_LEFT_HAND);
                coordinates[0] *= GL_WIN_SIZE_X/g_nXRes;
                coordinates[1] *= GL_WIN_SIZE_Y/g_nYRes;
                */

                //line += "\n";
                //printf("%f %f %f\n", named_joints["left_hand"].getPosition().x, named_joints["left_hand"].getPosition().y, named_joints["left_hand"].getPosition().z);
                //printf("%s\n", line.c_str());

                fp_skeleton.open(filename_skeleton.c_str(), std::ios::app);
                fp_skeleton << line << std::endl;
                fp_skeleton.close();
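                // Opening and closing the CSV on every frame is simple but slow; keeping fp_skeleton open for the whole run would also work.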

                cv::imshow("User Image", mImageBGR);                
                filename_color = path + prefix + SSTR(i) + ext;
                //printf("%s\n", filename_color.c_str());
                imwrite(filename_color.c_str(), mImageBGR);
                pnh.setParam("frame_counter", ++i);
            }
        }
        rate.sleep();

        // check keyboard
        if( cv::waitKey( 1 ) == 'q' )
            break;
    }
    fp_skeleton.close();
    vsColorStream.stop();
    vsColorStream.destroy();
    devDevice.close();
    nite::NiTE::shutdown();
    openni::OpenNI::shutdown();
    return 0;
}
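One last note: tf_prefix and relative_frame are required private parameters (the tracker exits if they are missing), so set them in your launch file or on the command line, e.g. rosrun openni2_tracker tracker _tf_prefix:=tracker _relative_frame:=camera_link (assuming the executable is named tracker and that camera_link is your reference frame; substitute your own).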