Compare Revisions: Rev 286 → Rev 287

/Classwork/ME5524 - Bayesian Robo/Final Project/DepthProcessor.cpp
0,0 → 1,459
#include "DepthProcessor.h"
 
// Limit the processing queue to three in-flight frames
QSemaphore processorBuffer(3);
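// One slot is released in processDepthData() when a frame finishes; the matching
// acquire() is assumed to happen in the frame producer (not shown in this file)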
 
DepthProcessor::DepthProcessor(QObject *parent)
: QThread(parent) {
 
moveToThread(this);
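// Give this object the thread's own affinity so queued slot invocations
// (presumably the incoming depth frames) execute in this thread's event loop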
 
topLeftImage = NULL;
topRightImage = NULL;
botLeftImage = NULL;
botRightImage = NULL;
 
rawDepthImage = QImage(X_RES, Y_RES, QImage::Format_ARGB32);
lastValidData16 = cv::Mat(Y_RES, X_RES, CV_16UC1, cv::Scalar(0));
lastValidDepthImage = QImage(X_RES, Y_RES, QImage::Format_ARGB32);
lastValidProcessed = QImage(X_RES, Y_RES, QImage::Format_ARGB32);
 
fgMaskMOG = cv::Mat(Y_RES, X_RES, CV_8UC1);
movementMaskImage = QImage(X_RES, Y_RES, QImage::Format_ARGB32);
fgMaskTmp = cv::Mat(Y_RES, X_RES, CV_32FC1, cv::Scalar(0)); // zero-init: read back as the running average by cv::accumulateWeighted()
fgMaskRaw = cv::Mat(Y_RES, X_RES, CV_8UC1);
movementMaskRawImage = QImage(X_RES, Y_RES, QImage::Format_ARGB32);
fgMaskAverage = cv::Mat(Y_RES, X_RES, CV_8UC1);
movementMaskAverageImage = QImage(X_RES, Y_RES, QImage::Format_ARGB32);
pMOG = new cv::BackgroundSubtractorMOG2(BACKGROUND_SUBTRACTOR_HISTORY, BACKGROUND_SUBTRACTOR_NMIXTURES, false);
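// NOTE: in the OpenCV 2.x API the second MOG2 constructor argument is the variance
// threshold, not a mixture count, so the constant's name may be misleading;
// the final 'false' disables shadow detection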
 
rawHorizonImage = QImage(X_RES, Y_RES, QImage::Format_ARGB32);
lastValidHorizonImage = QImage(X_RES, Y_RES, QImage::Format_ARGB32);
overlayHorizonImage = QImage(X_RES, Y_RES, QImage::Format_ARGB32);
 
depthHorizon = QVector<float>(X_RES);
rawDepthHorizon = QVector<float>(X_RES);
 
movementMaskHorizon = QVector<int>(X_RES);
 
movementPointsImage = QImage(X_RES, Y_RES, QImage::Format_ARGB32);
movementPointsMat = cv::Mat(Y_RES, X_RES, CV_8UC3);
//params.thresholdStep = 20;
//params.minThreshold = 50;
//params.minThreshold = 500;
params.minDistBetweenBlobs = BLOB_MIN_DISTANCE;
params.minArea = BLOB_MIN_AREA;
params.maxArea = BLOB_MAX_AREA;
params.filterByColor = false;
params.filterByCircularity = false;
params.filterByConvexity = false;
params.filterByInertia = false;
params.filterByArea = true;
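// Only the area filter is enabled: any blob with area in [BLOB_MIN_AREA, BLOB_MAX_AREA]
// is reported, and detections closer than BLOB_MIN_DISTANCE are merged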
blobDetector = new cv::SimpleBlobDetector(params);
}
 
DepthProcessor::~DepthProcessor() {
 
}
 
/**
 * Set the sensor's field of view (in radians; converted to degrees when drawing)
 */
void DepthProcessor::setFOV(float width, float height) {
fovWidth = width;
fovHeight = height;
}
 
/**
* Change the image to display on the main GUI
*/
void DepthProcessor::setDisplayImage(const int pane, const QString &image) {
// Resolve the requested name to its image buffer once, then assign it to the pane
QImage *selected = NULL;
if (image == "Raw Depth")
selected = &rawDepthImage;
else if (image == "Last Valid Depth")
selected = &lastValidDepthImage;
else if (image == "Movement Mask Raw Depth")
selected = &movementMaskRawImage;
else if (image == "Movement Mask Average Depth")
selected = &movementMaskAverageImage;
else if (image == "Processed Depth")
selected = &lastValidProcessed;
else if (image == "Raw Depth Horizon")
selected = &rawHorizonImage;
else if (image == "Last Valid Horizon")
selected = &lastValidHorizonImage;
else if (image == "Overlay Horizon")
selected = &overlayHorizonImage;
else if (image == "Movement Map")
selected = &movementPointsImage;
else
return; // unknown name: leave the pane unchanged

switch (pane) {
case 0:
topLeftImage = selected;
break;
case 1:
topRightImage = selected;
break;
case 2:
botLeftImage = selected;
break;
case 3:
botRightImage = selected;
break;
default:
break;
}
}
 
/**
* Updates the main GUI
*/
void DepthProcessor::updateImages() {
// Skip panes that have not yet been assigned an image (the pointers start as NULL)
if (topLeftImage != NULL)
emit setImageTopLeft(*topLeftImage);
if (topRightImage != NULL)
emit setImageTopRight(*topRightImage);
if (botLeftImage != NULL)
emit setImageBotLeft(*botLeftImage);
if (botRightImage != NULL)
emit setImageBotRight(*botRightImage);
}
 
/**
* Here we process the raw data from the sensor
*/
void DepthProcessor::processDepthData(const cv::Mat &data) {
// cv::Mat copies are shallow, so this just takes a reference to the caller's 16-bit frame
rawData16 = data;
 
// Save a pixel as valid data if it is != 0
for (int y = 0; y < Y_RES; y++) {
for (int x = 0; x < X_RES; x++) {
if (rawData16.ptr<ushort>(y)[x] != 0) {
lastValidData16.ptr<ushort>(y)[x] = rawData16.ptr<ushort>(y)[x];
}
}
}
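// lastValidData16 now holds each pixel's most recent nonzero reading,
// so sensor dropouts are filled with stale-but-valid depth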
 
// (Disabled) 5x5 median filter for noise removal
//cv::medianBlur(lastValidData16, lastValidData16, 5);
 
// Execute a background subtraction to obtain moving objects
pMOG->operator()(lastValidData16, fgMaskMOG, -1);
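// A negative learning rate lets MOG2 pick its update rate automatically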
 
fgMaskMOG.copyTo(fgMaskRaw);
 
// (Disabled) simple erode/dilate pass to remove noise:
//cv::erode(fgMaskMOG, fgMaskMOG, cv::Mat());
//cv::dilate(fgMaskMOG, fgMaskMOG, cv::Mat());
// Active alternative: morphological opening and closing with an elliptical kernel
int kernelSize = 9;
cv::Mat kernel = cv::getStructuringElement(cv::MORPH_ELLIPSE, cv::Size(kernelSize, kernelSize));
// Morphological opening (remove small objects from foreground)
cv::morphologyEx(fgMaskMOG, fgMaskMOG, cv::MORPH_OPEN, kernel);
// Morphological closing (fill small holes in the foreground)
cv::morphologyEx(fgMaskMOG, fgMaskMOG, cv::MORPH_CLOSE, kernel);
 
// Average the moving mask's values and shrink it by a bit to remove edges
cv::accumulateWeighted(fgMaskMOG, fgMaskTmp, 0.5);
cv::convertScaleAbs(fgMaskTmp, fgMaskAverage);
cv::erode(fgMaskAverage, fgMaskAverage, kernel);
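// accumulateWeighted keeps an exponential moving average (alpha = 0.5) in the float
// buffer; convertScaleAbs saturates it back to 8-bit before the erosion trims the edges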
 
// For each column, find the closest nonzero reading inside the vertical FOV band and convert it to inches
for (int x = 0; x < X_RES; x++) {
ushort min = 9999;
ushort rawMin = 9999;
for (int y = VFOV_MIN; y < VFOV_MAX; y++) {
if (lastValidData16.ptr<ushort>(y)[x] != 0)
min = qMin(min, lastValidData16.ptr<ushort>(y)[x]);
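// Note: the raw minimum below includes zero (invalid) readings, so a single dropout in the column pulls it to 0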
rawMin = qMin(rawMin, rawData16.ptr<ushort>(y)[x]);
}
 
// Convert the raw distance values to distance in inches
// Distance (inches) = (raw distance - 13.072) / 25.089;
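// e.g. a raw value of 2522 maps to (2522 - 13.072) / 25.089 ≈ 100 inches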
depthHorizon[x] = (min - 13.072) / 25.089;
rawDepthHorizon[x] = (rawMin - 13.072) / 25.089;
}
 
// Mark the points of detected movements in the movement mask if the threshold is exceeded
for (int x = 0; x < X_RES; x++) {
int moved = 0;
for (int y = VFOV_MIN; y < VFOV_MAX; y++) {
if (fgMaskAverage.ptr<uchar>(y)[x] >= FG_MASK_THRESHOLD)
moved = 1;
}
movementMaskHorizon[x] = moved;
}
 
// Draw all images
drawDepthImages();
drawFOVImages();
 
// Update GUI with selected image
updateImages();
 
// Free one buffer slot so the producer can queue another frame
processorBuffer.release(1);
}
 
/**
* Generate a visualization of the depth data
*/
void DepthProcessor::drawDepthImages() {
// Convert raw data to images to be displayed
for (int y = 0; y < Y_RES; ++y) {
for (int x = 0; x < X_RES; ++x) {
// rawDepthImage
rawDepthImage.setPixel(x, y, qRgb(rawData16.ptr<ushort>(y)[x] / (SCALE_DIVISOR / 2),
rawData16.ptr<ushort>(y)[x] / SCALE_DIVISOR, rawData16.ptr<ushort>(y)[x] / (SCALE_DIVISOR * 2)));
 
// lastValidDepthImage
lastValidDepthImage.setPixel(x, y, qRgb(lastValidData16.ptr<ushort>(y)[x] / (SCALE_DIVISOR / 2),
lastValidData16.ptr<ushort>(y)[x] / SCALE_DIVISOR, lastValidData16.ptr<ushort>(y)[x] / (SCALE_DIVISOR * 2)));
 
// lastValidProcessed
if (fgMaskMOG.ptr<uchar>(y)[x] == 0) {
lastValidProcessed.setPixel(x, y, qRgba(lastValidData16.ptr<ushort>(y)[x] / (SCALE_DIVISOR / 2),
lastValidData16.ptr<ushort>(y)[x] / SCALE_DIVISOR, lastValidData16.ptr<ushort>(y)[x] / (SCALE_DIVISOR * 2), 150));
} else {
lastValidProcessed.setPixel(x, y, qRgb(lastValidData16.ptr<ushort>(y)[x] / (SCALE_DIVISOR / 2),
lastValidData16.ptr<ushort>(y)[x] / SCALE_DIVISOR, lastValidData16.ptr<ushort>(y)[x] / (SCALE_DIVISOR * 2)));
}
 
// movementMaskRawImage
movementMaskRawImage.setPixel(x, y, qRgb(fgMaskRaw.ptr<uchar>(y)[x], fgMaskRaw.ptr<uchar>(y)[x], fgMaskRaw.ptr<uchar>(y)[x]));
 
// movementMaskAverageImage
movementMaskAverageImage.setPixel(x, y, qRgb(fgMaskAverage.ptr<uchar>(y)[x], fgMaskAverage.ptr<uchar>(y)[x], fgMaskAverage.ptr<uchar>(y)[x]));
}
}
 
// Draw lines indicating the FOV zones
QPainter imagePainter;
 
imagePainter.begin(&rawDepthImage);
imagePainter.setPen(QPen(COLOR_DEPTH_FOV, 1));
imagePainter.drawLine(0, VFOV_MIN, X_RES, VFOV_MIN);
imagePainter.drawLine(0, VFOV_MAX, X_RES, VFOV_MAX);
imagePainter.end();
 
imagePainter.begin(&lastValidDepthImage);
imagePainter.setPen(QPen(COLOR_DEPTH_FOV, 1));
imagePainter.drawLine(0, VFOV_MIN, X_RES, VFOV_MIN);
imagePainter.drawLine(0, VFOV_MAX, X_RES, VFOV_MAX);
imagePainter.end();
 
imagePainter.begin(&lastValidProcessed);
imagePainter.setPen(QPen(COLOR_DEPTH_FOV, 1));
imagePainter.setBrush(QBrush(COLOR_DEPTH_FOV_FILL));
imagePainter.drawLine(0, VFOV_MIN, X_RES, VFOV_MIN);
imagePainter.drawLine(0, VFOV_MAX, X_RES, VFOV_MAX);
imagePainter.drawRect(0, 0, X_RES, VFOV_MIN);
imagePainter.drawRect(0, VFOV_MAX, X_RES, Y_RES);
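// (QPainter clips to the image, so rect heights past Y_RES simply shade everything outside the FOV band)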
imagePainter.end();
}
 
/**
* Draws the given vector of points onto an image (projected across an arc)
*/
void DepthProcessor::drawDistanceFOV(QImage &image, QVector<float> &data) {
QPainter painter;
painter.begin(&image);
 
// Plot each column's distance along its bearing within the FOV arc
painter.translate(X_RES / 2, Y_RES);
// Aim at the FOV's left edge; each loop iteration below advances by one column's angle
painter.rotate(-90 - qRadiansToDegrees(fovWidth / 2));
for (int x = 0; x < X_RES; x++) {
painter.rotate(qRadiansToDegrees(fovWidth / X_RES));
painter.setPen(QPen(COLOR_DEPTH_POINT, 2, Qt::SolidLine, Qt::RoundCap, Qt::RoundJoin));
painter.drawPoint(data[x], 0);
painter.setPen(QPen(COLOR_DEPTH_BACKGROUND, 2, Qt::SolidLine, Qt::RoundCap, Qt::RoundJoin));
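// Shade from the measured return out to a fixed 400-pixel range, marking the space beyond the closest obstacle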
painter.drawLine(QPoint(data[x], 0), QPoint(400, 0));
}
 
painter.end();
}
 
/**
* Draws the sensor's FOV onto the image
*/
void DepthProcessor::drawSensorFOV(QImage &image) {
QPainter painter;
painter.begin(&image);
 
// Draw the sensor's FOV
painter.translate(X_RES / 2, Y_RES);
painter.setPen(QPen(COLOR_FOV, 2, Qt::DashLine));
painter.rotate(-90 - qRadiansToDegrees(fovWidth / 2));
painter.drawLine(0, 0, X_RES, 0);
painter.rotate(qRadiansToDegrees(fovWidth));
painter.drawLine(0, 0, X_RES, 0);
 
painter.end();
}
 
/**
* Generate a horizontal visualization of the depth data
*/
void DepthProcessor::drawFOVImages() {
// Draw the raw FOV
rawHorizonImage.fill(Qt::white);
drawDistanceFOV(rawHorizonImage, rawDepthHorizon);
drawSensorFOV(rawHorizonImage);
 
// Draw the last valid data FOV
lastValidHorizonImage.fill(Qt::white);
drawDistanceFOV(lastValidHorizonImage, depthHorizon);
drawSensorFOV(lastValidHorizonImage);
 
// Draw only the movement points along with results of blob detection
movementPointsImage.fill(Qt::white);
drawMovementZones(movementPointsImage, depthHorizon);
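// Rasterize the QImage into a 3-channel Mat, since the blob detector consumes cv::Mat input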
convertQImageToMat3C(movementPointsImage, movementPointsMat);
blobDetector->detect(movementPointsMat, blobKeypoints);
std::vector<cv::Point2f> points;
for (int i = 0; i < blobKeypoints.size(); i++) {
points.push_back(cv::Point2f(blobKeypoints[i].pt.x, blobKeypoints[i].pt.y));
}
movementObjects = movementTracker.update(points);
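// movementTracker associates the blob centroids with existing tracks; each returned
// MOVING_OBJECT carries an ID, predicted position, velocity, heading, and history average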
drawKeyPoints(movementPointsImage, blobKeypoints);
drawMovingObjects(movementPointsImage, movementObjects);
drawSensorFOV(movementPointsImage);
 
// Draw the overlay of movements onto static objects
overlayHorizonImage.fill(Qt::white);
drawDistanceFOV(overlayHorizonImage, depthHorizon);
drawMovementZones(overlayHorizonImage, depthHorizon);
drawKeyPoints(overlayHorizonImage, blobKeypoints);
drawMovingObjects(overlayHorizonImage, movementObjects);
drawSensorFOV(overlayHorizonImage);
}
 
/**
* Draws the zones of detected movement on the map
*/
void DepthProcessor::drawMovementZones(QImage &image, QVector<float> &data) {
QPainter painter;
painter.begin(&image);
 
// Plot the detected movement zones along their bearings within the FOV arc
painter.translate(X_RES / 2, Y_RES);
// Aim at the FOV's left edge; each loop iteration below advances by one column's angle
painter.rotate(-90 - qRadiansToDegrees(fovWidth / 2));
painter.setPen(QPen(COLOR_MOVEMENT_ZONE, 20, Qt::SolidLine, Qt::RoundCap, Qt::RoundJoin));
for (int x = 0; x < X_RES; x++) {
painter.rotate(qRadiansToDegrees(fovWidth / X_RES));
if (movementMaskHorizon[x] == 1)
painter.drawPoint(data[x], 0);
}
 
painter.end();
}
 
/**
* Draws the set of keypoints on the image
*/
void DepthProcessor::drawKeyPoints(QImage &image, std::vector<cv::KeyPoint> &points) {
QPainter painter;
painter.begin(&image);
 
painter.setPen(QPen(COLOR_KEYPOINT, 6, Qt::SolidLine, Qt::RoundCap, Qt::RoundJoin));
for (int i = 0; i < points.size(); i++) {
painter.drawPoint(points[i].pt.x, points[i].pt.y);
}
 
painter.end();
}
 
/**
* Draws each moving object along with its ID, velocity, and predicted heading
*/
void DepthProcessor::drawMovingObjects(QImage &image, std::vector<MOVING_OBJECT> &objects) {
QPainter painter;
painter.begin(&image);
 
for (int i = 0; i < objects.size(); i++) {
QPoint initPoint = QPoint(objects[i].predicted_pt.x, objects[i].predicted_pt.y);
// Calculate the line to draw to indicate object movement velocity and angle
float endX = initPoint.x() + (objects[i].velocity * cos(objects[i].angle)) * VELOCITY_MULTIPLIER;
float endY = initPoint.y() + (objects[i].velocity * sin(objects[i].angle)) * VELOCITY_MULTIPLIER;
QPointF predPoint = QPointF(endX, endY);
// Draw the object's estimated position
painter.setPen(QPen(COLOR_EST_POSITION, 6, Qt::SolidLine, Qt::RoundCap, Qt::RoundJoin));
painter.drawPoint(initPoint);
// Draw the object's ID
painter.drawText(initPoint.x() + 3, initPoint.y() - 3, QString::number(objects[i].ID));
// Draw the line indicating object's movement velocity and angle
painter.setPen(QPen(COLOR_EST_POSITION, 2, Qt::SolidLine, Qt::RoundCap, Qt::RoundJoin));
painter.drawLine(initPoint, predPoint);
// Draw the object's running average
painter.setPen(QPen(COLOR_EST_AVGERAGE, 6, Qt::SolidLine, Qt::RoundCap, Qt::RoundJoin));
painter.drawPoint(objects[i].historyAvg.x, objects[i].historyAvg.y);
}
 
painter.end();
}
 
/**
 * Copies a single-channel 8-bit Mat into the QImage as grayscale
 * (despite the "3C" suffix, the Mat is read as one channel per pixel)
 */
void DepthProcessor::convertMatToQImage3C(cv::Mat &mat, QImage &image) {
uchar *ptr;
for (int y = 0; y < Y_RES; y++) {
ptr = mat.ptr<uchar>(y);
for (int x = 0; x < X_RES; x++) {
image.setPixel(x, y, qRgb(ptr[x], ptr[x], ptr[x]));
}
}
}
 
/**
 * Copies the QImage into a 3-channel 8-bit Mat using OpenCV's BGR channel order
 */
void DepthProcessor::convertQImageToMat3C(QImage &image, cv::Mat &mat) {
for (int y = 0; y < Y_RES; y++) {
for (int x = 0; x < X_RES; x++) {
cv::Vec3b &pixel = mat.at<cv::Vec3b>(y, x);
QColor pixColor(image.pixel(x, y));
pixel.val[0] = pixColor.blue();
pixel.val[1] = pixColor.green();
pixel.val[2] = pixColor.red();
}
}
}