PingPong_Robot, commit 4af56b61
authored 5 years ago by Abdelrahman Sabry Ahmed
improving readability
parent 05818936
Changes: 3 changed files with 344 additions and 408 deletions
  README.md            +3   -1
  src/arm_node.cpp     +155 -165
  src/camera_node.cpp  +186 -242
README.md  +3 -1
@@ -3,7 +3,9 @@
 1- for position detection I use the Intel RealSense D435 with librealsense to get the RGB frames,
 and use opencv to get the ball position with color detection and blob detection, then also use
 librealsense to calculate the distance to the ball, getting the exact coordinates in space.
+(coordinates in camera_node are according to the camera's perspective)
 2- the coordinates are then sent to the arm_node from the camera_node through a ros message.
-3- using libfranka to move the Panda robotic arm to the ball position and hit it.
\ No newline at end of file
+3- using libfranka to move the Panda robotic arm to the ball position and hit it.
+(coordinates in arm_node are according to the Panda's designation)
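The hand-off in step 2 is a single ROS topic: camera_node publishes the detected ball position as a custom pingpong_robot/Num message on "coordinates", and arm_node subscribes to it (both sides appear in the diffs below). As a minimal sketch only, assuming Num.msg simply carries three float64 fields x, y and z (the .msg file is not part of this commit), a stand-alone listener for that topic could look like the following; the node name coordinates_listener and the callback on_coordinates are made up for this example:

// Assumed message definition, msg/Num.msg (not shown in this commit):
//   float64 x
//   float64 y
//   float64 z
#include <ros/ros.h>
#include "pingpong_robot/Num.h"

// Print every ball position received on the "coordinates" topic.
void on_coordinates(const pingpong_robot::Num::ConstPtr& msg)
{
    ROS_INFO("ball at x=%f y=%f z=%f", msg->x, msg->y, msg->z);
}

int main(int argc, char** argv)
{
    ros::init(argc, argv, "coordinates_listener"); // hypothetical node name
    ros::NodeHandle n;
    // Queue size 1 matches what arm_node uses: only the newest position matters.
    ros::Subscriber sub = n.subscribe("coordinates", 1, on_coordinates);
    ros::spin(); // process callbacks until shutdown
    return 0;
}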
src/arm_node.cpp  +155 -165
#include <ros/ros.h>
#include <cmath>
#include <iostream>
...
@@ -12,57 +11,51 @@
#include <franka/gripper.h>
#include "examples_common.h"
#include <thread>
//#include <dos.h>
// variables to assign the ball origin coordinates to
-float x_origin = 0;
-float y_origin = 0;
-float z_origin = 0;
+float x = 0;
+float y = 0;
+float z = 0;
+float x_origin = 0;
+float y_origin = 0;
+float z_origin = 0;
+float x_desired = 0;
+float y_desired = 0;
-bool frist_callback_loop = true;
+bool first_callback_loop = true;
bool first_call_for_motion_xy_function = true;
bool first_call_for_hit_ball_function = true;
//bool hit_ball = false;
bool hit_ball_now = false;
std::mutex mymutex;
std::mutex xy_desired_mutex;
-float x_desired;
-float y_desired;

-void init_pos (franka::Robot* robot)
+void go_to_initial_position (franka::Robot & robot)
{
-    std::array<double, 7> point_2 = {{M_PI_2, -M_PI_4, 0, -3 * M_PI_4, 0, M_PI, -M_PI_4}};
-    MotionGenerator motion_point2(0.5, point_2);
-    robot->control(motion_point2);
+    // going to an initial position to start from it
+    std::array<double, 7> point_2 = {{M_PI_2, -M_PI_4, 0, -3 * M_PI_4, 0, M_PI, -M_PI_4}};
+    MotionGenerator initial_position(0.5, point_2);
+    robot.control(initial_position);
}

-int gripper_grip (franka::Gripper* gripper){
+int gripper_grip (franka::Gripper & gripper){
-    gripper->homing();
-    double grasping_width = 0.024;
-    // Check for the maximum grasping width.
-    franka::GripperState gripper_state = gripper->readOnce();
-    if (gripper_state.max_width < grasping_width) {
-        std::cout << "Object is too large for the current fingers on the gripper." << std::endl;
-        return -1;
-    }
-    // Grasp the object.
-    if (!gripper->grasp(grasping_width, 0.1, 60)) {
-        std::cout << "Failed to grasp object." << std::endl;
-        return -1;
-    }
+    gripper.homing();
+    double grasping_width = 0.024;
+    // Check for the maximum grasping width.
+    franka::GripperState gripper_state = gripper.readOnce();
+    if (gripper_state.max_width < grasping_width) {
+        std::cout << "Object is too large for the current fingers on the gripper." << std::endl;
+        return -1;
+    }
+    // Grasp the object.
+    if (!gripper.grasp(grasping_width, 0.1, 60)) {
+        std::cout << "Failed to grasp object." << std::endl;
+        return -1;
+    }
}

-void gripper_releas (franka::Gripper* gripper){
+void gripper_releas (franka::Gripper & gripper){
    std::cout << "Grasped object, will release it now." << std::endl;
-    gripper->stop();
+    gripper.stop();
}

int hit_ball (franka::Robot & robot){
@@ -72,34 +65,37 @@ int hit_ball (franka::Robot & robot){
    bool up = true;
    std::array<double, 16> initial_pose;
    std::array<double, 16> new_pose;
-    double init_z, final_z;
+    double initial_z, final_z;
-    robot.control([&time, &new_pose, &up, &initial_pose, &init_z, &final_z]
+    robot.control([&time, &new_pose, &up, &initial_pose, &initial_z, &final_z]
        (const franka::RobotState& robot_state, franka::Duration period) -> franka::CartesianPose {
        time += period.toSec();
        if (time == 0){ //first_call_for_hit_ball_function) {
            initial_pose = robot_state.O_T_EE_c;
            new_pose = initial_pose;
-            init_z = initial_pose[14];
-            final_z = init_z + 0.2; // hight to reach in z 20 cm
+            initial_z = initial_pose[14];
+            final_z = initial_z + 0.2; // hight to reach in z 20 cm
            first_call_for_hit_ball_function = false;
        }
        else if ((new_pose[14] < final_z) && up){
            new_pose[14] += 0.001;
            ros::Duration(0.005).sleep(); // sleep for x in second
        }
        else if (new_pose[14] >= final_z){
            up = false;
        }
-        else if (!up && (new_pose[14] > init_z)){
+        else if (!up && (new_pose[14] > initial_z)){
            new_pose[14] -= 0.001;
            ros::Duration(0.005).sleep(); // sleep for x in second
        }
        else {
-            return franka::MotionFinished(new_pose);}
-        }
+            return franka::MotionFinished(new_pose);
+            hit_ball_now = false;
+        }
        return new_pose;
    });
}
@@ -119,23 +115,23 @@ int motion_xy (franka::Robot & robot){
    std::array<double, 16> new_pose;
    robot.control([&new_pose, &initial_pose]
        (const franka::RobotState& robot_state, franka::Duration period) -> franka::CartesianPose {
        //time += period.toSec();
        if (first_call_for_motion_xy_function) {
            initial_pose = robot_state.O_T_EE_c;
            new_pose = initial_pose;
            first_call_for_motion_xy_function = false;
        }
        {
            std::unique_lock<std::mutex> lock(xy_desired_mutex);
            if (x_desired <= 0.1 && y_desired <= 0.1){
                new_pose[12] = initial_pose[12] + x_desired;
                new_pose[13] = initial_pose[13] + y_desired;
            }
        }
        return new_pose;
    });
}
@@ -152,41 +148,39 @@ void coordinates_check (franka::Robot* robot){
    //}
}
*/

-void clbk (const pingpong_robot::Num::ConstPtr& msg) {
+void coordinates_call_back (const pingpong_robot::Num::ConstPtr& msg) {
    std::cout << "x= " << msg->x << ", y= " << msg->y << ", z= " << msg->z << std::endl;
    float z_threshold = z_origin + 0.1; //threshold over origin 10cm
-    float x_now;
-    float y_now;
-    float z_now;
+    float x_current;
+    float y_current;
+    float z_current;
-    x_now = msg->x;
-    y_now = msg->y;
-    z_now = msg->z;
+    x_current = msg->x;
+    y_current = msg->y;
+    z_current = msg->z;
-    if (x_now != 0.0 && y_now != 0.0 && z_now != 0.0 && frist_callback_loop){
-        x_origin = x_now;
-        y_origin = y_now;
-        z_origin = z_now;
+    if (x_current != 0.0 && y_current != 0.0 && z_current != 0.0 && first_callback_loop){
+        x_origin = x_current;
+        y_origin = y_current;
+        z_origin = z_current;
-        frist_callback_loop = false;
+        first_callback_loop = false;
    }
-    if (z_now <= z_threshold){ //if z <= threshold
+    if (z_current <= z_threshold){ //if z <= threshold
        // call hit function
        //hit_ball(robot);
        //hit_ball = true;
    }
    {
-        //update desired position
+        //update desired position in xy plane
        std::unique_lock<std::mutex> lock(xy_desired_mutex);
-        x_desired = x_origin - x_now;
-        y_desired = y_origin - y_now;
-    }
+        x_desired = x_origin - x_current;
+        y_desired = y_origin - y_current;
+    }
}
@@ -195,81 +189,77 @@ int main(int argc, char** argv) {
        std::cerr << "Usage: " << argv[0] << " <robot-hostname>" << std::endl;
        return -1;
    }
-    ros::init(argc, argv, "my_subscriber");
+    //initializing ros
+    ros::init(argc, argv, "arm_motion_node");
    ros::NodeHandle n;
+    //initializing libfranka
    franka::Robot robot(argv[1]);
    franka::Gripper gripper(argv[1]);
    setDefaultBehavior(robot);
    // First move the robot to a suitable joint configuration
    //std::array<double, 7> q_goal = {{0, -M_PI_4, 0, -3 * M_PI_4, 0, M_PI_2, M_PI_4}};
    std::array<double, 7> q_goal = {{M_PI_2, -M_PI_4, 0, -3 * M_PI_4, 0, M_PI, -M_PI_4}};
    MotionGenerator motion_generator(0.5, q_goal);
    std::cout << "WARNING: This example will move the robot! "
              << "Please make sure to have the user stop button at hand!" << std::endl
              << "Press Enter to continue..." << std::endl;
    std::cin.ignore();
    robot.control(motion_generator);
    std::cout << "Finished moving to initial joint configuration." << std::endl;
    // Set additional parameters always before the control loop,
    // NEVER in the control loop!
    // Set the joint impedance.
    robot.setJointImpedance({{3000, 3000, 3000, 2500, 2500, 2000, 2000}});
    // Set the collision behavior.
    std::array<double, 7> lower_torque_thresholds_nominal{
        {25.0, 25.0, 22.0, 20.0, 19.0, 17.0, 14.}};
    std::array<double, 7> upper_torque_thresholds_nominal{
        {35.0, 35.0, 32.0, 30.0, 29.0, 27.0, 24.0}};
    std::array<double, 6> upper_force_thresholds_nominal{{40.0, 40.0, 40.0, 35.0, 35.0, 35.0}};
    std::array<double, 6> lower_force_thresholds_nominal{{30.0, 30.0, 30.0, 25.0, 25.0, 25.0}};
    std::array<double, 7> lower_torque_thresholds_acceleration{
        {25.0, 25.0, 22.0, 20.0, 19.0, 17.0, 14.0}};
    std::array<double, 7> upper_torque_thresholds_acceleration{
        {35.0, 35.0, 32.0, 30.0, 29.0, 27.0, 24.0}};
    std::array<double, 6> lower_force_thresholds_acceleration{{30.0, 30.0, 30.0, 25.0, 25.0, 25.0}};
    std::array<double, 6> upper_force_thresholds_acceleration{{40.0, 40.0, 40.0, 35.0, 35.0, 35.0}};
    /*
    std::array<double, 7> lower_torque_thresholds_acceleration{
        {25.0, 50.0, 22.0, 40.0, 19.0, 34.0, 14.0}};
    std::array<double, 7> upper_torque_thresholds_acceleration{
        {35.0, 70.0, 32.0, 60.0, 29.0, 54.0, 24.0}};
    std::array<double, 6> lower_force_thresholds_acceleration{{30.0, 30.0, 60.0, 25.0, 25.0, 50.0}};
    std::array<double, 6> upper_force_thresholds_acceleration{{40.0, 40.0, 80.0, 35.0, 35.0, 70.0}};
    */
    robot.setCollisionBehavior(
        lower_torque_thresholds_acceleration, upper_torque_thresholds_acceleration,
        lower_torque_thresholds_nominal, upper_torque_thresholds_nominal,
        lower_force_thresholds_acceleration, upper_force_thresholds_acceleration,
        lower_force_thresholds_nominal, upper_force_thresholds_nominal);
-    ros::Subscriber sub = n.subscribe("coordinates", 1, clbk);
-    init_pos(&robot);
-    std::thread mythread = std::thread([&](){
-        motion_xy(robot);
-        //hit_ball(robot);
-    });
-    /*
-    gripper_grip(&gripper);
-    init_pos(&robot);
-    motion_z(&robot);
-    init_pos(&robot);
-    gripper_releas(&gripper);
-    */
+    //go_to_initial_position(robot);
+    //gripper_grip(gripper);
+    ros::Subscriber sub = n.subscribe("coordinates", 1, coordinates_call_back);
+    std::thread mythread = std::thread([&](){
+        //creating another thrad for the xy-plane tracking
+        motion_xy(robot);
+        //hit_ball(robot);
+    });
+    //if (//end condition){
+    //gripper_releas(gripper);
+    //}
    ros::spin();
    return 0;
}
...
src/camera_node.cpp  +186 -242
/*
#include <ros/ros.h>
#include <pcl_ros/point_cloud.h>
#include <sensor_msgs/PointCloud.h>
#include <pcl/point_types.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/point_cloud_conversion.h>
#include <pcl_conversions/pcl_conversions.h>
#include <pcl/PCLPointCloud2.h>
#include <pcl_ros/transforms.h>
#include <geometry_msgs/Point.h>
#include <boost/foreach.hpp>
#include <image_transport/image_transport.h>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/image_encodings.h>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include "opencv2/opencv.hpp"
#include "std_msgs/MultiArrayLayout.h"
#include "std_msgs/MultiArrayDimension.h"
#include "std_msgs/Float32MultiArray.h"
#include "std_msgs/String.h"
#include "std_msgs/Int32.h"
#include "cv-helpers.hpp"
#include "pingpong_robot/Num.h"
#include <librealsense2/rs.hpp> // Include RealSense Cross Platform API
#include <librealsense2/rsutil.h>
//#include "example.hpp"
#include <iostream>
#include <sstream>
#include <vector>
#include <algorithm>
#include <cmath>
#include <mutex>
//libfranka/include
//#include <franka/exception.h>
//#include <franka/robot.h>
//#include <franka/gripper.h>
using namespace cv;
using namespace std;
*/
// License: Apache 2.0. See LICENSE file in root directory.
// Copyright(c) 2019 Intel Corporation. All Rights Reserved.
#include <ros/ros.h>
#include <librealsense2/rs.hpp> // Include RealSense Cross Platform API
#include <librealsense2/rsutil.h>
#include <iostream> // for cout
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include "opencv2/opencv.hpp"
#include "cv-helpers.hpp"
#include "pingpong_robot/Num.h"
#include <iostream>
#include <mutex>
#include <vector>
#include <sstream>
@@ -67,14 +20,13 @@ using namespace cv;
using namespace std;
using namespace rs2;
-std::mutex mymutex;
-std::mutex mymutex_2;
int thresh = 100; //canny
int max_thresh = 255;
RNG rng(12345);
+std::mutex mymutex;
+std::mutex mymutex_2;
int x = 0;
int y = 0;
double last_dist = 0;
@@ -84,6 +36,7 @@ int const n=4; //number of values to average
const int inpWidth = 640;   // Width of network's input image
const int inpHeight = 480;  // Height of network's input image
const float WHRatio = inpWidth / (float)inpHeight;
vector<vector<int>> coordinate;
std::vector<int> last_coordinate;
std::vector<double> avg_dist;
@@ -102,131 +55,133 @@ static const std::string DEPTH_WINDOW = "Depth window";
void image_process (Mat frame)
{
    Mat canny_output, frame_HSV, frame_threshold, frame_gray, frame_bin;
    vector<vector<Point>> contours;
    vector<Vec4i> hierarchy;
    //cv::namedWindow(OPENCV_WINDOW);
    int low_H = 164, low_S = 155, low_V = 54;
    int high_H = 179, high_S = 255, high_V = 191;
    last_coordinate.resize(2);
    avg_dist.resize(n);
    cvtColor(frame, frame_HSV, COLOR_BGR2HSV); //Convert from BGR to HSV colorspace
    inRange(frame_HSV, Scalar(low_H, low_S, low_V), Scalar(high_H, high_S, high_V), frame_threshold); // Detect the object based on HSV Range Values
    cvtColor(frame, frame_gray, CV_BGR2GRAY); /// Convert it to gray
    GaussianBlur(frame_threshold, frame_gray, Size(9, 9), 2, 2); /// Reduce the noise so we avoid false circle detection
    vector<Vec3f> circles;
    /*
    /// Apply the Hough Transform to find the circles
    HoughCircles( frame_gray, circles, CV_HOUGH_GRADIENT, 1, frame_gray.rows/8, 200, 100, 0, 0 );
    // Draw the circles detected
    for( size_t i = 0; i < circles.size(); i++ )
    {
        Point center(cvRound(circles[i][0]), cvRound(circles[i][1]));
        int radius = cvRound(circles[i][2]);
        // circle center
        circle( frame, center, 3, Scalar(0,255,0), -1, 8, 0 );
        // circle outline
        circle( frame, center, radius, Scalar(0,0,255), 3, 8, 0 );
    }
    */
    Canny(frame_threshold, canny_output, thresh, thresh * 2, 3); /// Detect edges using canny
    findContours(canny_output, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0)); /// Find contours
    Mat drawing = Mat::zeros(canny_output.size(), CV_8UC3);
    /// Draw contours
    for (int i = 0; i < contours.size(); i++){
        Scalar color = Scalar(rng.uniform(0, 255), rng.uniform(0, 255), rng.uniform(0, 255));
        drawContours(drawing, contours, i, color, 2, 8, hierarchy, 0, Point());
    }
    cvtColor(drawing, drawing, CV_BGR2GRAY);
    threshold(drawing, frame_bin, 100, 255, THRESH_BINARY); // convert grayscale to binary image
    Moments m = moments(frame_bin, true); // find moments of the image
    Point p(m.m10 / m.m00, m.m01 / m.m00); // coordinates of centroid
    //cout << p.x <<" "<<p.y <<endl;
    if (p.x == -2147483648 && p.y == -2147483648){
        ball_detected = false;
    }
    else
        ball_detected = true;
    if (p.x >= 0 && p.x <= 1000)
    { //neglect large numbers sent
        if (first_reading)
        {
            last_coordinate.at(0) = p.x;
            last_coordinate.at(1) = p.y;
            first_reading = false;
        }
        if (abs(p.x - last_coordinate.at(0)) < 150 && abs(p.y - last_coordinate.at(1)) < 150)
        {
            coordinate.push_back(std::vector<int>{p.x, p.y});
            if (coordinate.size() == n - 1)
            {
                //std::sort(&avg_x.at(0),&avg_x.at(n-1));
                //std::sort(&avg_y.at(0),&avg_y.at(n-1));
                //for(auto const & x : coordinate) {
                //    std::cout << "H" << x.at(0) << " " << x.at(1) << std::endl;
                //}
                std::sort(coordinate.begin(), coordinate.end(),
                    [](std::vector<int> const& a, std::vector<int> const& b){
                        if (a.at(0) > b.at(0)) return true;
                        if (a.at(0) < b.at(0)) return false;
                        return a.at(1) > b.at(1);
                    });
                //for(auto const & x : coordinate) {
                //    std::cout << "L" << x.at(0) << " " << x.at(1) << std::endl;
                //}
                {
                    std::unique_lock<std::mutex> lock(mymutex);
                    x = coordinate.at(n / 2).at(0);
                    y = coordinate.at(n / 2).at(1);
                    //std::cout <<"coordinates {"<< x<< ", " << y <<"}"<<std::endl;
                }
                coordinate.clear();
            }
            last_coordinate.at(0) = p.x;
            last_coordinate.at(1) = p.y;
        }
        else if (last_coordinate.at(0) == 0 && last_coordinate.at(1) == 0){
            coordinate.push_back(std::vector<int>{p.x, p.y});
            last_coordinate.at(0) = p.x;
            last_coordinate.at(1) = p.y;
        }
        else {
            //cout << "coordinate reading cancelled"<<endl;
        }
        circle(frame, p, 5, Scalar(128, 0, 0), -1);
    }
    //cv::SimpleBlobDetector detector;
    SimpleBlobDetector::Params params;
    std::vector<KeyPoint> keypoints;
    // Change thresholds
    params.minThreshold = 10;
    params.maxThreshold = 300;
    // Filter by Area.
    params.filterByArea = true;
    params.minArea = 50;
    // Filter by Circularity
    params.filterByCircularity = true;
    params.minCircularity = 0.5;
    // Filter by Convexity
    params.filterByConvexity = true;
    params.minConvexity = 0.2;
    // Filter by Inertia
    params.filterByInertia = true;
    params.minInertiaRatio = 0.1;
    cv::Ptr<cv::SimpleBlobDetector> detector = cv::SimpleBlobDetector::create(params);
    detector->detect(frame, keypoints);
    Mat im_with_keypoints;
    cv::drawKeypoints(frame, keypoints, im_with_keypoints, Scalar(0, 0, 255), DrawMatchesFlags::DRAW_RICH_KEYPOINTS);
@@ -268,96 +223,85 @@ void calculate_distances (const struct rs2_intrinsics * intrin, Mat depth_mat,
int main(int argc, char * argv[]) try
{
-    ros::init(argc, argv, "image_proccess");
+    ros::init(argc, argv, "image_proccessing_node");
    ros::NodeHandle nh;
    ros::Publisher coordinates_pub_ = nh.advertise<pingpong_robot::Num>("coordinates", 100);
    int itiration_number = 0;
-    float point_3d[3];
    rs2::pipeline pipe;
    auto config = pipe.start();
    auto profile = config.get_stream(RS2_STREAM_COLOR).as<video_stream_profile>();
    rs2::align align_to(RS2_STREAM_COLOR);
    auto stream = config.get_stream(RS2_STREAM_DEPTH).as<rs2::video_stream_profile>();
    auto intrinsics = stream.get_intrinsics(); // Calibration data
    //Crop image to network's input size
    Size cropSize;
    if (profile.width() / (float)profile.height() > WHRatio)
    {
        cropSize = Size(static_cast<int>(profile.height() * WHRatio), profile.height());
    }
    else
    {
        cropSize = Size(profile.width(), static_cast<int>(profile.width() / WHRatio));
    }
+    float point_3d[3];
    Rect crop(Point((profile.width() - cropSize.width) / 2, (profile.height() - cropSize.height) / 2), cropSize);
    //Window to show detections
    const auto window_name = "Display Image";
    namedWindow(window_name, WINDOW_AUTOSIZE);
    while (getWindowProperty(window_name, WND_PROP_AUTOSIZE) >= 0)
    {
        // Wait for the next set of frames
        auto data = pipe.wait_for_frames();
        // Make sure the frames are spatially aligned
        data = align_to.process(data);
        auto color_frame = data.get_color_frame();
        auto depth_frame = data.get_depth_frame();
        // If we only received new depth frame,
        // but the color did not update, continue
        static int last_frame_number = 0;
        if (color_frame.get_frame_number() == last_frame_number)
            continue;
        last_frame_number = color_frame.get_frame_number();
        // Convert RealSense frame to OpenCV matrix:
        auto color_mat = frame_to_mat(color_frame);
        auto depth_mat = depth_frame_to_meters(pipe, depth_frame);
        image_process(color_mat);
        if (ball_detected){
            calculate_distances(&intrinsics, depth_mat, point_3d);
            if (itiration_number < 5)
                itiration_number++;
            //publish msg
            if (itiration_number >= 4){
                pingpong_robot::Num msg;
                msg.x = point_3d[0];
                msg.y = point_3d[1];
                msg.z = point_3d[2];
-                if (msg.x != 0.0 && msg.y != 0.0 && msg.z != 0.0)
+                if (msg.x != 0.0 && msg.y != 0.0 && msg.z != 0.0){
                    if (first_distance){
                        last_dist = msg.z;
                        coordinates_pub_.publish(msg);
                        first_distance = false;
                    }
-                    if (!first_distance && abs(last_dist - msg.z) <= 0.3){
+                    else if (!first_distance && abs(last_dist - msg.z) <= 0.3){
                        coordinates_pub_.publish(msg);
                    }
+                }
            }
        }
        imshow(window_name, color_mat);
        if (waitKey(1) >= 0)
            break;
    }
    return EXIT_SUCCESS;
}
catch (const rs2::error & e)
{
...