Ask Your Question

Revision history [back]

Slow data rate from Camera vision

I am using a Raspberry Pi 3B with a Pi Camera, and I set up the Python code below to compute the position of a robot and publish it so that the data can be used to control the robot. But I have an issue with the C++ code acting on those data: the C++ code reacts much faster than the data are published, causing the robot to always stop ahead of the target position.

I am using the published data in a while loop in the C++ node controlling the robot. I tried to slow down the speed of the loop, but that does not seem to help.

Could you help me see through this?

#!/usr/bin/env python
"""Track a white ball with the Pi camera and publish its pixel position
on the ROS topics /ball_pose_x and /ball_pose_y as std_msgs/Float32."""
import rospy
from std_msgs.msg import Float32

# import the necessary packages
from picamera.array import PiRGBArray
from picamera import PiCamera
from collections import deque
import datetime
import time
import cv2
import argparse
# import imutils  (removed)

# Math functions
import numpy as np

pub_x = rospy.Publisher('ball_pose_x', Float32, queue_size=10)
pub_y = rospy.Publisher('ball_pose_y', Float32, queue_size=10)
rospy.init_node('ball_tracking')

# initialize the camera and grab a reference to the raw camera capture
camera = PiCamera()
camera.resolution = (640, 480)
camera.framerate = 32
rawCapture = PiRGBArray(camera, size=(640, 480))

# allow the camera to warmup
time.sleep(0.1)

# ----- Basic definition -----------

# Args definition
# construct the argument parser; parse_known_args (instead of parse_args)
# ignores the extra remapping arguments ROS appends when the node is
# started through roslaunch/rosrun, which would otherwise make argparse exit
ap = argparse.ArgumentParser()
ap.add_argument("-v", "--video",
    help="path to the (optional) video file")
ap.add_argument("-b", "--buffer", type=int, default=64,
    help="max buffer size")
args = vars(ap.parse_known_args()[0])


# define the lower and upper boundaries of the
# ball in the HSV color space, then initialize the
# list of tracked points
whiteLower = (0, 0, 230)
whiteUpper = (180, 25, 255)
pts = deque(maxlen=args["buffer"])

# ------ end definition ----------

# capture frames from the camera
for frame in camera.capture_continuous(rawCapture, format="bgr", use_video_port=True):
    # grab the raw NumPy array representing the image
    image = frame.array

    hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)

    # construct a mask for the color "white", then perform
    # a series of dilations and erosions to remove any small
    # blobs left in the mask
    mask = cv2.inRange(hsv, whiteLower, whiteUpper)
    mask = cv2.erode(mask, None, iterations=2)
    mask = cv2.dilate(mask, None, iterations=2)

    # find contours in the mask and initialize the current
    # (x, y) center of the ball ([-2] keeps compatibility with
    # the different findContours return shapes across OpenCV versions)
    cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,
        cv2.CHAIN_APPROX_SIMPLE)[-2]
    center = None

    # only proceed if at least one contour was found
    if len(cnts) > 0:
        # find the largest contour in the mask, then use
        # it to compute the minimum enclosing circle and
        # centroid
        c = max(cnts, key=cv2.contourArea)
        ((x, y), radius) = cv2.minEnclosingCircle(c)
        M = cv2.moments(c)
        # guard: a degenerate contour has zero area (m00 == 0) and
        # would raise ZeroDivisionError in the centroid computation
        if M["m00"] > 0:
            cx = M["m10"] / M["m00"]
            cy = M["m01"] / M["m00"]
            center = (int(cx), int(cy))

            # Publishing data to ROS
            pub_x.publish(cx)
            pub_y.publish(cy)

            # only proceed if the radius meets a minimum size
            if radius > 5:
                # draw the circle and centroid on the frame,
                # then update the list of tracked points
                cv2.circle(image, (int(x), int(y)), int(radius),
                    (0, 255, 255), 2)
                cv2.circle(image, center, 5, (0, 0, 255), -1)

    # update the points queue
    # pts.appendleft(center)

    # show the frame
    cv2.imshow("Frame", image)
    key = cv2.waitKey(1) & 0xFF

    # clear the stream in preparation for the next frame
    rawCapture.truncate(0)

    # quit on `q`, or when ROS shuts down (e.g. Ctrl-C on the node)
    if key == ord("q") or rospy.is_shutdown():
        break

Slow data rate from Camera vision

I am using a Raspberry Pi 3B with a Pi Camera, and I set up the Python code below to compute the position of a robot and publish it so that the data can be used to control the robot. But I have an issue with the C++ code acting on those data: the C++ code reacts much faster than the data are published, causing the robot to always stop ahead of the target position.

I am using the published data in a while loop in the C++ node controlling the robot. I tried to slow down the speed of the loop, but that does not seem to help.

Could you help me see through this?

#!/usr/bin/env python
"""Track a white ball with the Pi camera and publish its pixel position
on the ROS topics /ball_pose_x and /ball_pose_y as std_msgs/Float32."""
import rospy
from std_msgs.msg import Float32

# import the necessary packages
from picamera.array import PiRGBArray
from picamera import PiCamera
from collections import deque
import datetime
import time
import cv2
import argparse
# import imutils  (removed)

# Math functions
import numpy as np

pub_x = rospy.Publisher('ball_pose_x', Float32, queue_size=10)
pub_y = rospy.Publisher('ball_pose_y', Float32, queue_size=10)
rospy.init_node('ball_tracking')

# initialize the camera and grab a reference to the raw camera capture
camera = PiCamera()
camera.resolution = (640, 480)
camera.framerate = 32
rawCapture = PiRGBArray(camera, size=(640, 480))

# allow the camera to warmup
time.sleep(0.1)

# ----- Basic definition -----------

# Args definition
# construct the argument parser; parse_known_args (instead of parse_args)
# ignores the extra remapping arguments ROS appends when the node is
# started through roslaunch/rosrun, which would otherwise make argparse exit
ap = argparse.ArgumentParser()
ap.add_argument("-v", "--video",
    help="path to the (optional) video file")
ap.add_argument("-b", "--buffer", type=int, default=64,
    help="max buffer size")
args = vars(ap.parse_known_args()[0])


# define the lower and upper boundaries of the
# ball in the HSV color space, then initialize the
# list of tracked points
whiteLower = (0, 0, 230)
whiteUpper = (180, 25, 255)
pts = deque(maxlen=args["buffer"])

# ------ end definition ----------

# capture frames from the camera
for frame in camera.capture_continuous(rawCapture, format="bgr", use_video_port=True):
    # grab the raw NumPy array representing the image
    image = frame.array

    hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)

    # construct a mask for the color "white", then perform
    # a series of dilations and erosions to remove any small
    # blobs left in the mask
    mask = cv2.inRange(hsv, whiteLower, whiteUpper)
    mask = cv2.erode(mask, None, iterations=2)
    mask = cv2.dilate(mask, None, iterations=2)

    # find contours in the mask and initialize the current
    # (x, y) center of the ball ([-2] keeps compatibility with
    # the different findContours return shapes across OpenCV versions)
    cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,
        cv2.CHAIN_APPROX_SIMPLE)[-2]
    center = None

    # only proceed if at least one contour was found
    if len(cnts) > 0:
        # find the largest contour in the mask, then use
        # it to compute the minimum enclosing circle and
        # centroid
        c = max(cnts, key=cv2.contourArea)
        ((x, y), radius) = cv2.minEnclosingCircle(c)
        M = cv2.moments(c)
        # guard: a degenerate contour has zero area (m00 == 0) and
        # would raise ZeroDivisionError in the centroid computation
        if M["m00"] > 0:
            cx = M["m10"] / M["m00"]
            cy = M["m01"] / M["m00"]
            center = (int(cx), int(cy))

            # Publishing data to ROS
            pub_x.publish(cx)
            pub_y.publish(cy)

            # only proceed if the radius meets a minimum size
            if radius > 5:
                # draw the circle and centroid on the frame,
                # then update the list of tracked points
                cv2.circle(image, (int(x), int(y)), int(radius),
                    (0, 255, 255), 2)
                cv2.circle(image, center, 5, (0, 0, 255), -1)

    # update the points queue
    # pts.appendleft(center)

    # show the frame
    cv2.imshow("Frame", image)
    key = cv2.waitKey(1) & 0xFF

    # clear the stream in preparation for the next frame
    rawCapture.truncate(0)

    # quit on `q`, or when ROS shuts down (e.g. Ctrl-C on the node)
    if key == ord("q") or rospy.is_shutdown():
        break

C++ code

// s_control node: subscribes to the ball position published by the Python
// tracker and publishes velocity commands on /cmd_vel.
// Bug fix: five include directives had lost their leading '#', which makes
// the file fail to compile.
#include "ros/ros.h"
#include "std_msgs/Float32.h"
#include <sstream>
#include <fstream>
#include <math.h>
#include "geometry_msgs/Twist.h"

using namespace std;

// Subscribers/publisher and the latest ball position received from the
// vision node (updated only inside the pose callbacks via ros::spinOnce()).
ros::Subscriber pose_subx;
ros::Subscriber pose_suby;
std_msgs::Float32 ball_posex;
std_msgs::Float32 ball_posey;
ros::Publisher velocity_publisher;

// Forward declarations.
void poseCallbackx(const std_msgs::Float32::ConstPtr &pose_message);
void poseCallbacky(const std_msgs::Float32::ConstPtr &pose_message);
double getDistanceX(double x, double Xref);
double getDistanceY(double y, double Yref);
void MyTest(double goalx);

// Entry point: wire up the pose subscribers and the velocity publisher,
// then run a single control experiment toward x = 400.
int main(int argc, char **argv) {
    ros::init(argc, argv, "s_control");
    ros::NodeHandle n;

    pose_subx = n.subscribe("/ball_pose_x", 10, poseCallbackx);
    pose_suby = n.subscribe("/ball_pose_y", 10, poseCallbacky);
    velocity_publisher = n.advertise<geometry_msgs::Twist>("/cmd_vel", 100);
    ros::Rate loop_rate(10);

    MyTest(400);
    loop_rate.sleep();
    ros::spin();
    return 0;
}

// Cache the most recent x coordinate published by the vision node.
void poseCallbackx(const std_msgs::Float32::ConstPtr &pose_message) {
    ball_posex.data = pose_message->data;
}

// Cache the most recent y coordinate published by the vision node.
void poseCallbacky(const std_msgs::Float32::ConstPtr &pose_message) {
    ball_posey.data = pose_message->data;
}

// Return the absolute distance along x between the current position and Xref.
// Bug fix: the original called abs(), which (with <math.h> and
// `using namespace std`) may resolve to the C `int abs(int)` and silently
// truncate the double distance; fabs() keeps the fractional part.
double getDistanceX(double x, double Xref) {
    return fabs(Xref - x);
}

// Return the absolute distance along y between the current position and Yref.
// Bug fix: the original called abs(), which (with <math.h> and
// `using namespace std`) may resolve to the C `int abs(int)` and silently
// truncate the double distance; fabs() keeps the fractional part.
double getDistanceY(double y, double Yref) {
    return fabs(Yref - y);
}

// Runs one positioning experiment:
//   Phase 1 - drive straight for MyTime seconds and use the observed camera
//             displacement (x0,y0)->(x1,y1) to estimate theta, the angle
//             between the camera frame and the robot's driving direction.
//   Phase 2 - incremental PI control toward (goalx, goaly) for up to 60 s,
//             rotating the commanded velocity by -theta into the robot frame.
// NOTE(review): ball_posex/ball_posey are refreshed only by ros::spinOnce(),
// so the loop can act on stale data if the camera publishes slowly.
void MyTest(double goalx) {
    double t0 = ros::Time::now().toSec();
    double current_time;
    geometry_msgs::Twist vel_msg;
    double x0, y0, x1, y1;
    double theta;
    double MyTime = 1.5;  // duration of the calibration move, seconds

    double vx, vy, vxn, vyn;

    ros::Rate loop_rate(10);
    ros::spinOnce();

    // ---- Phase 1: finding the angle ----
    do {
        ros::spinOnce();
        double t1 = ros::Time::now().toSec();
        current_time = t1 - t0;
        loop_rate.sleep();
        // Sample the starting position during the first half of the move.
        // NOTE(review): x0/y0 stay uninitialized if the very first iteration
        // already passes MyTime*0.5 — confirm loop timing.
        if (current_time < MyTime * 0.5) {
            x0 = ball_posex.data;
            y0 = ball_posey.data;
        }
        vel_msg.linear.x = 30;
        vel_msg.linear.y = 0;
        vel_msg.linear.z = 0;
        velocity_publisher.publish(vel_msg);
    } while (current_time < MyTime);

    ros::Duration(1).sleep();

    x1 = ball_posex.data;
    y1 = ball_posey.data;

    // Angle between camera frame and driving direction.
    // NOTE(review): NaN if the robot did not move (x1==x0 and y1==y0).
    theta = acos((x1 - x0) / sqrt(pow((x1 - x0), 2) + pow((y1 - y0), 2)));

    loop_rate.sleep();

    // ---- Phase 2: PI control toward the goal ----
    double e_x_old;
    double e_y_old;
    double e_x;
    double e_y;
    e_x = 0;
    e_y = 0;

    double v_x_old = 0;
    double v_y_old = 0;

    double kp = 0.5;   // proportional gain
    double ki = 0.1;   // integral gain

    double tolerance = 30;  // pixels; stop commanding inside this band

    double goaly = 250;

    do {
        ros::spinOnce();

        e_x_old = e_x;
        e_y_old = e_y;

        e_x = goalx - ball_posex.data;
        e_y = goaly - ball_posey.data;

        double t1 = ros::Time::now().toSec();
        current_time = t1 - t0;

        if (getDistanceX(goalx, ball_posex.data) > tolerance) {
            // Incremental (velocity-form) PI controller:
            //   v_k = v_{k-1} + kp*e_k + (ki - kp)*e_{k-1}
            vx = v_x_old + kp * e_x + e_x_old * (ki - kp);
            vy = 0;

            if (getDistanceY(goaly, ball_posey.data) > tolerance) {
                vy = v_y_old + kp * e_y + e_y_old * (ki - kp);
            }

            v_x_old = vx;
            // Bug fix: v_y_old was never updated in the original code,
            // so the y controller lost its incremental/integral term.
            v_y_old = vy;

            // Rotate the command by -theta into the robot's driving frame.
            vxn = vx * cos(-theta) - vy * sin(-theta);
            vyn = vx * sin(-theta) + vy * cos(-theta);

            // Saturate the forward component at +/-30 and re-rotate.
            if (vxn > 30) {
                vx = 30;
                vxn = vx * cos(-theta) - vy * sin(-theta);
                vyn = vx * sin(-theta) + vy * cos(-theta);
            }
            if (vxn < -30) {
                vx = -30;
                vxn = vx * cos(-theta) - vy * sin(-theta);
                vyn = vx * sin(-theta) + vy * cos(-theta);
            }

            vx = vxn;
            vy = vyn;

            // Sign corrections based on the direction observed in phase 1.
            if ((x1 - x0) < 0) {
                vx = -vx;
                vy = -vy;
            }
            if ((y1 - y0) > 0)
                vy = -vy;

            vel_msg.linear.x = vx;
            vel_msg.linear.y = vy;
            vel_msg.linear.z = 0;
            velocity_publisher.publish(vel_msg);
        }
        /* Stop-inside-tolerance branch intentionally disabled by the author:
        else {
            vel_msg.linear.x = 0;
            vel_msg.linear.y = 0;
            vel_msg.linear.z = 0;
            velocity_publisher.publish(vel_msg);
        } */
        loop_rate.sleep();

        ros::Duration(0.05).sleep();

    } while (current_time < 60);
}