move to CT Drone localization algorithm
franklinselva committed Nov 14, 2023
1 parent 8b8622f commit 01a416d
Showing 7 changed files with 105 additions and 121 deletions.
18 changes: 13 additions & 5 deletions ColorTracker.gen
@@ -34,7 +34,7 @@ component ColorTracker {
lang "c";
doc "A GenoM module for the ColorTracker using a monocular camera.";

codels-require "opencv4, felix-g3utils, vision-idl";
codels-require "opencv4, felix-g3utils, vision-idl, eigen3";
uses or_pose_estimator;

exception e_BAD_IMAGE_PORT { short code; string<128> message; };
@@ -53,13 +53,22 @@ component ColorTracker {
float width, height;
};

struct CameraInfo {
float focal_length;
float field_of_view;
float pixel_size;

Size image_size;
};

/* -------------------------- IDS --------------------------- */
ids {
or::sensor::intrinsics intrinsics; // Camera intrinsics
or::sensor::extrinsics extrinsics; // Camera extrinsics
or::sensor::frame image_frame; // Image frame
or::ColorTrack::PlateSequence plates, all_detected_plates; // The plates sequence
or::ColorTrack::ColorInfo color; // The color of the object to track
CameraInfo camera_info; // Camera information

or_pose_estimator::state frame_pose;
Pose camera_pose;
@@ -96,7 +105,6 @@ component ColorTracker {
port multiple out or::sensor::frame output {
doc "The image frame with the detected object.";
};


/* ------------------ TASK DEFINITION -------------------- */
task track {
@@ -113,16 +121,16 @@ component ColorTracker {

activity color_track() {
doc "Detect the color and keep track of coordinates from the image.";
after set_color, set_distance_threshold, set_object_size, set_focal_length, set_camera_pose, set_map_size;
after set_color, set_distance_threshold, set_camera_pose, set_map_size;
task track;

codel <start> FetchPorts(port in Frame, port in Intrinsics, port in Extrinsics, port in DronePose,
ids out plates, ids out all_detected_plates, ids in debug)
yield pause::start, poll;
codel <poll> FetchDataFromPorts(port in Frame, port in Intrinsics, port in Extrinsics, port in DronePose,
ids out image_frame, ids out intrinsics, ids out extrinsics, ids out frame_pose, ids in debug)
ids out image_frame, ids out intrinsics, ids out extrinsics, ids out camera_info, ids out frame_pose, ids in debug)
yield pause::poll, main, ether;
codel <main> TrackObject(ids in start_tracking, ids in image_frame, ids in intrinsics, ids in extrinsics, ids in color, ids in object_size, ids in focal_length,
codel <main> TrackObject(ids in start_tracking, ids in image_frame, ids in camera_info, ids in color,
port in DronePose, ids in distance_threshold, ids out plates, ids inout all_detected_plates, ids in camera_pose,
port out PlatesInfo, port out output, ids in debug, ids in show_frames, ids in publish_og)
yield main, poll, publish, ether;
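For reference, the codels changed below consume the new CameraInfo struct through the C type that genom3 generates for it, ColorTracker_CameraInfo, as seen in the codel signatures further down. A rough sketch of that C-side view follows; the exact layout emitted into ColorTracker_c_types.h is an assumption, not part of this commit.

/* Sketch only: approximate generated C view of the Size and CameraInfo IDL structs. */
typedef struct ColorTracker_Size {
  float width, height;
} ColorTracker_Size;

typedef struct ColorTracker_CameraInfo {
  float focal_length;           /* mm, derived from the camera intrinsics */
  float field_of_view;          /* rad, horizontal */
  float pixel_size;             /* mm per pixel */
  ColorTracker_Size image_size; /* frame width and height in pixels */
} ColorTracker_CameraInfo;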
2 changes: 1 addition & 1 deletion Makefile.am
@@ -26,7 +26,7 @@ distclean-local:
merge: merge-interactive
merge-%:
cd ${top_srcdir} && ${GENOM3} \
skeleton -l 'c++' -m $* ColorTracker.gen
skeleton -l 'c++' -m $* ColorTracker.gen

# documentation
dist_doc_DATA= README.html README.adoc
8 changes: 5 additions & 3 deletions autoconf/ag_templates.m4
@@ -18,7 +18,9 @@ AC_DEFUN([AG_OPT_TEMPLATES],
a template is also supported]),
[ag_templates="$withval"])
if test -n "$ag_templates"; then
if test "x$ag_templates" = xno; then
ag_templates=
elif test -n "$ag_templates"; then
# user may want to pass options to templates
AC_DISABLE_OPTION_CHECKING
@@ -51,8 +53,8 @@ AC_DEFUN([AG_OUTPUT_TEMPLATES],
AC_MSG_NOTICE([configuring for $t])
# run genom
AC_MSG_NOTICE([running $ag_genom $t -C $tdir $ag_input])
eval $ag_genom $t -C $tdir $ag_input
AC_MSG_NOTICE([running $ag_genom -C $srcdir $t -C $tdir $ag_input])
eval $ag_genom -C $srcdir $t -C $tdir $ag_input
if test $? != 0; then
rm -rf "$tdir"
AC_MSG_ERROR([cannot generate template $t], 2)
103 changes: 29 additions & 74 deletions codels/ColorTracker_track_codels.cc
@@ -107,6 +107,7 @@ FetchDataFromPorts(const ColorTracker_Frame *Frame,
or_sensor_frame *image_frame,
or_sensor_intrinsics *intrinsics,
or_sensor_extrinsics *extrinsics,
ColorTracker_CameraInfo *camera_info,
or_pose_estimator_state *frame_pose, bool debug,
const genom_context self)
{
@@ -161,6 +162,26 @@ FetchDataFromPorts(const ColorTracker_Frame *Frame,
*intrinsics = *IntrinsicsData;
*extrinsics = *ExtrinsicsData;

// Calculate Focal Distance from intrinsics
camera_info->focal_length = intrinsics->calib.fx / 1000; // focal length in mm

// Calculate the FoV of the camera
camera_info->field_of_view = 2 * atan(image_frame->width / (2 * camera_info->focal_length * 1000));
// double fov_y = 2 * atan(intrinsics->height / (2 * focal_length));

// Pixel size in mm
camera_info->pixel_size = (2 * camera_info->focal_length * tan(camera_info->field_of_view / 2)) / image_frame->width;
// double pixel_size_y = (2 * focal_length * tan(fov_y / 2)) / intrinsics->height;

// Image Size
camera_info->image_size.width = image_frame->width;
camera_info->image_size.height = image_frame->height;

if (debug)
{
CODEL_LOG_INFO(2, 1, "Camera Info: Focal Length: %f, Field of View: %f, Pixel Size: %f", camera_info->focal_length, camera_info->field_of_view, camera_info->pixel_size);
}

return ColorTracker_main;
}
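For reference, the camera-info block above reduces to the standard pinhole relations; a sketch of the math, with f_x taken from the intrinsics, W the frame width in pixels, and the millimetre interpretation of f_x / 1000 following the comment in the code:

f = \frac{f_x}{1000}, \qquad
\mathrm{FoV} = 2\arctan\!\left(\frac{W}{2 \cdot 1000\, f}\right) = 2\arctan\!\left(\frac{W}{2 f_x}\right), \qquad
p = \frac{2 f \tan(\mathrm{FoV}/2)}{W}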

@@ -175,10 +196,8 @@ FetchDataFromPorts(const ColorTracker_Frame *Frame,
*/
genom_event
TrackObject(bool start_tracking, const or_sensor_frame *image_frame,
const or_sensor_intrinsics *intrinsics,
const or_sensor_extrinsics *extrinsics,
const ColorTracker_CameraInfo *camera_info,
const or_ColorTrack_ColorInfo *color,
const ColorTracker_Size *object_size, float focal_length,
const ColorTracker_DronePose *DronePose,
float distance_threshold,
or_ColorTrack_PlateSequence *plates,
@@ -278,84 +297,20 @@ TrackObject(bool start_tracking, const or_sensor_frame *image_frame,
return ColorTracker_e_BAD_POSE_PORT(&msg, self);
}

// Convert pixel coordinates to cartesian coordinates
double camera_x = 0.0, camera_y = 0.0, camera_z = 0.0;
Tracking::imageToWorldCoordinates(image_x, image_y, focal_length, bounding_box, intrinsics, object_size->width, camera_x, camera_y, camera_z);

// Camera to object transformation matrix
// TODO: Refactor using << operator
cv::Mat cam_to_object = cv::Mat::eye(4, 4, CV_64F);
cam_to_object.at<double>(0, 3) = camera_x;
cam_to_object.at<double>(1, 3) = camera_y;
cam_to_object.at<double>(2, 3) = camera_z;

// Camera to drone transformation matrix
cv::Mat cam_to_drone = cv::Mat::zeros(4, 4, CV_64F);
double roll = camera_pose->roll;
double pitch = camera_pose->pitch;
double yaw = camera_pose->yaw;

// REF: https://web.mit.edu/2.05/www/Handout/HO2.PDF
cam_to_drone.at<double>(0, 0) = cos(yaw) * cos(pitch);
cam_to_drone.at<double>(0, 1) = cos(yaw) * sin(pitch) * sin(roll) - sin(yaw) * cos(roll);
cam_to_drone.at<double>(0, 2) = cos(yaw) * sin(pitch) * cos(roll) + sin(yaw) * sin(roll);
cam_to_drone.at<double>(1, 0) = sin(yaw) * cos(pitch);
cam_to_drone.at<double>(1, 1) = sin(yaw) * sin(pitch) * sin(roll) + cos(yaw) * cos(roll);
cam_to_drone.at<double>(1, 2) = sin(yaw) * sin(pitch) * cos(roll) - cos(yaw) * sin(roll);
cam_to_drone.at<double>(2, 0) = -sin(pitch);
cam_to_drone.at<double>(2, 1) = cos(pitch) * sin(roll);
cam_to_drone.at<double>(2, 2) = cos(pitch) * cos(roll);
cam_to_drone.at<double>(3, 3) = 1.0;
cam_to_drone.at<double>(0, 3) = camera_pose->x;
cam_to_drone.at<double>(1, 3) = camera_pose->y;
cam_to_drone.at<double>(2, 3) = camera_pose->z;

// Drone to world transformation matrix
cv::Mat drone_to_world = cv::Mat::zeros(4, 4, CV_64F);
cv::Mat R = cv::Mat::zeros(3, 3, CV_64F);
cv::Mat q = cv::Mat::zeros(4, 1, CV_64F);

q.at<double>(0, 0) = DronePoseData->att._value.qx;
q.at<double>(1, 0) = DronePoseData->att._value.qy;
q.at<double>(2, 0) = DronePoseData->att._value.qz;
q.at<double>(3, 0) = DronePoseData->att._value.qw;
Tracking::getRotationMatrix(q, R);

drone_to_world.at<double>(0, 0) = R.at<double>(0, 0);
drone_to_world.at<double>(0, 1) = R.at<double>(0, 1);
drone_to_world.at<double>(0, 2) = R.at<double>(0, 2);
drone_to_world.at<double>(1, 0) = R.at<double>(1, 0);
drone_to_world.at<double>(1, 1) = R.at<double>(1, 1);
drone_to_world.at<double>(1, 2) = R.at<double>(1, 2);
drone_to_world.at<double>(2, 0) = R.at<double>(2, 0);
drone_to_world.at<double>(2, 1) = R.at<double>(2, 1);
drone_to_world.at<double>(2, 2) = R.at<double>(2, 2);
drone_to_world.at<double>(3, 3) = 1.0;
drone_to_world.at<double>(0, 3) = DronePoseData->pos._value.x;
drone_to_world.at<double>(1, 3) = DronePoseData->pos._value.y;
drone_to_world.at<double>(2, 3) = DronePoseData->pos._value.z;

// Camera to world transformation matrix
cv::Mat cam_to_world = cv::Mat::eye(4, 4, CV_64F);
cam_to_world = drone_to_world * cam_to_drone * cam_to_object;

// Extract translation from transformation matrix
cv::Mat world_coord = cv::Mat::zeros(3, 1, CV_64F);
world_coord.at<double>(0, 0) = cam_to_world.at<double>(0, 3);
world_coord.at<double>(1, 0) = cam_to_world.at<double>(1, 3);
world_coord.at<double>(2, 0) = cam_to_world.at<double>(2, 3);
double target_x = 0.0, target_y = 0.0, target_z = 0.0;
Tracking::imageToWorldCoordinates(image_x, image_y, camera_info, DronePoseData, target_x, target_y, target_z);

if (debug)
{
CODEL_LOG_WARNING("World coordinates: (%f, %f, %f)", world_coord.at<double>(0, 0), world_coord.at<double>(1, 0), world_coord.at<double>(2, 0));
CODEL_LOG_WARNING("Drone coorindates: (%f, %f, %f)", drone_to_world.at<double>(0, 3), drone_to_world.at<double>(1, 3), drone_to_world.at<double>(2, 3));
CODEL_LOG_WARNING("World coordinates: (%f, %f, %f)", target_x, target_y, target_z);
CODEL_LOG_WARNING("Drone coorindates: (%f, %f, %f)", DronePoseData->pos._value.x, DronePoseData->pos._value.y, DronePoseData->pos._value.z);
}

// Convert relative world coordinates to absolute world coordinates
or_ColorTrack_PlateInfo plate;
plate.coord.x = world_coord.at<double>(0, 0);
plate.coord.y = world_coord.at<double>(1, 0);
plate.coord.z = world_coord.at<double>(2, 0);
plate.coord.x = target_x;
plate.coord.y = target_y;
plate.coord.z = target_z;
plate.index = all_detected_plates->seq._length + 1;

all_detected_plates->seq._buffer[all_detected_plates->seq._length] = plate;
7 changes: 5 additions & 2 deletions codels/Makefile.am
@@ -1,5 +1,8 @@

lib_LTLIBRARIES = libColorTracker_codels.la
lib_LTLIBRARIES =

# ColorTracker codels library
lib_LTLIBRARIES += libColorTracker_codels.la

libColorTracker_codels_la_SOURCES = ColorTracker_c_types.h
libColorTracker_codels_la_SOURCES += ColorTracker_codels.cc
@@ -12,7 +15,7 @@ libColorTracker_codels_la_LIBADD += $(codels_requires_LIBS)
libColorTracker_codels_la_LDFLAGS = -release $(PACKAGE_VERSION)


# idl mappings
# idl mappings
BUILT_SOURCES= ColorTracker_c_types.h
CLEANFILES= ${BUILT_SOURCES}

84 changes: 50 additions & 34 deletions codels/tracking.hpp
@@ -23,6 +23,7 @@

#include <opencv2/opencv.hpp>
#include <opencv2/core/types_c.h>
#include <eigen3/Eigen/Geometry>

#include "ColorTracker_c_types.h"
#include "acColorTracker.h"
@@ -112,34 +113,49 @@ namespace Tracking
return object_found;
}

void imageToWorldCoordinates(const int &image_x, const int &image_y, const double &f, const cv::Rect bounding_box, const or_sensor_intrinsics *intrinsics, double object_width, double &xw, double &yw, double &zw)
void imageToWorldCoordinates(const int &image_x, const int &image_y,
const ColorTracker_CameraInfo *camera_info,
const or_pose_estimator_state *drone_state,
double &xw, double &yw, double &zw)
{
// REF: https://stackoverflow.com/questions/12007775/to-calculate-world-coordinates-from-screen-coordinates-with-opencv
auto fx = intrinsics->calib.fx;
auto fy = intrinsics->calib.fy;
auto cx = intrinsics->calib.cx;
auto cy = intrinsics->calib.cy;

// F = (P x D) / W
// auto f = 480.0; // Fixed focal length for object of size 0.5m at 1m distance
// D’ = (W x F) / P
auto z = object_width * f / (bounding_box.width);

// x_world = (x_screen - c_x) * z_world / f_x
// y_world = (y_screen - c_y) * z_world / f_y

// This is calculated in Camera coordinate system
auto xw_tmp = (image_x - cx) * z / fx;
auto yw_tmp = (image_y - cy) * z / fy;
auto zw_tmp = z;

// Extrinsics matrix is considered to be identity, so we can directly use the camera coordinates
// X axis of camera is -Y axis of drone
// Y axis of camera is -X axis of drone
// Z axis of camera is -Z axis of drone
xw = -yw_tmp;
yw = -xw_tmp;
zw = -zw_tmp;
// Get the camera parameters
double focal_length = camera_info->focal_length;
double field_of_view = camera_info->field_of_view;
double pixel_size = camera_info->pixel_size;
double image_width = camera_info->image_size.width;
double image_height = camera_info->image_size.height;

double drone_x = drone_state->pos._value.x;
double drone_y = drone_state->pos._value.y;
double drone_z = drone_state->pos._value.z;
double drone_qx = drone_state->att._value.qx;
double drone_qy = drone_state->att._value.qy;
double drone_qz = drone_state->att._value.qz;
double drone_qw = drone_state->att._value.qw;

// Calculate the distance to target with the pinhole camera model
Eigen::Vector3d A(-(image_y - image_height / 2) * pixel_size,
-(image_x - image_width / 2) * pixel_size,
-focal_length);

// Build Quaternion vector
Eigen::Quaternion<double> Q(drone_qw, drone_qx, drone_qy, drone_qz);

// Vector to Blob in world frame
Eigen::Vector3d Vw = Q * A;

// Build Drone Position vector
Eigen::Vector3d P(drone_x, drone_y, drone_z);

// Calculate Intersection
double alpha = P(2) / Vw(2);

// Calculate Blob Position in world frame
Eigen::Vector3d Pw = P - alpha * Vw;

xw = Pw(0);
yw = Pw(1);
zw = Pw(2);
}
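For reference, the Eigen code above performs a ray and ground-plane intersection; a sketch of the math, assuming the tracked plate lies on the world plane z = 0, with u, v the pixel coordinates (image_x, image_y), W, H the image width and height, p the pixel size, f the focal length, q the drone attitude quaternion and P the drone position:

\mathbf{A} = \begin{pmatrix} -(v - H/2)\, p \\ -(u - W/2)\, p \\ -f \end{pmatrix}, \qquad
\mathbf{V}_w = q\, \mathbf{A}\, q^{-1}, \qquad
\alpha = \frac{P_z}{V_{w,z}}, \qquad
\mathbf{P}_w = \mathbf{P} - \alpha\, \mathbf{V}_w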

void getRotationMatrix(const cv::Mat quaternion, cv::Mat R)
@@ -223,23 +239,23 @@ namespace Tracking

or_ColorTrack_PlateInfo findTargetPoint(const std::vector<or_ColorTrack_PlateInfo> &points)
{
// Choose an arbitrary starting point as the initial target point
// Pick the medoid: the detection with the minimum average distance to all the others
or_ColorTrack_PlateInfo target = points[0];
double minAverageDistance = std::numeric_limits<double>::max();

for (const auto &p : points)
for (size_t i = 0; i < points.size(); ++i)
{
double totalDistance = 0.0;
for (const auto &other : points)
double averageDistance = 0;
for (size_t j = 0; j < points.size(); ++j)
{
totalDistance += distance(p, other);
averageDistance += distance(points[i], points[j]);
}
averageDistance /= points.size();

double averageDistance = totalDistance / points.size();
if (averageDistance < minAverageDistance)
{
target = p;
minAverageDistance = averageDistance;
target = points[i];
}
}

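For reference, the rewritten loop above selects the medoid of the accumulated detections, i.e. the plate whose average distance to all other detections is smallest. A minimal usage sketch, assuming the Tracking namespace and the or_ColorTrack_PlateInfo fields shown in this diff; the include path, the example_pick_target wrapper and the coordinate values are illustrative only:

#include <vector>
#include "tracking.hpp" /* assumed include path for the header above */

void example_pick_target()
{
  /* Three clustered detections and one outlier; coordinates are made up. */
  std::vector<or_ColorTrack_PlateInfo> detections(4);
  detections[0].coord.x = 1.00; detections[0].coord.y = 2.00; detections[0].coord.z = 0.0;
  detections[1].coord.x = 1.05; detections[1].coord.y = 2.02; detections[1].coord.z = 0.0;
  detections[2].coord.x = 0.98; detections[2].coord.y = 1.97; detections[2].coord.z = 0.0;
  detections[3].coord.x = 9.00; detections[3].coord.y = 9.00; detections[3].coord.z = 0.0;

  /* The medoid is one of the three clustered points, never the outlier. */
  or_ColorTrack_PlateInfo target = Tracking::findTargetPoint(detections);
  (void)target;
}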
4 changes: 2 additions & 2 deletions configure.ac
@@ -34,7 +34,7 @@ PKG_CHECK_MODULES(requires, [
vision-idl
])
PKG_CHECK_MODULES(codels_requires, [
opencv4, felix-g3utils, vision-idl
opencv4, felix-g3utils, vision-idl, eigen3
])

AC_PATH_PROG(GENOM3, [genom3], [no])
@@ -44,7 +44,7 @@ fi

dnl --with-templates option
AG_OPT_TEMPLATES([$GENOM3 ],
[$srcdir/ColorTracker.gen])
[ColorTracker.gen])

dnl Doc
AM_MISSING_PROG([ASCIIDOCTOR], [asciidoctor])
