Compare Revisions

Rev 290 → Rev 291

/Classwork/ME5524 - Bayesian Robotics/Final Project/DepthProcessor.h
0,0 → 1,118
#ifndef DEPTHPROCESSOR_H
#define DEPTHPROCESSOR_H
 
#include "GlobalDefines.h"

// Qt / OpenCV / STL headers for the types used below; GlobalDefines.h
// may already pull these in, but listing them keeps the header self-contained.
#include <QThread>
#include <QImage>
#include <QColor>
#include <QString>
#include <QVector>
#include <opencv2/opencv.hpp>
#include <vector>
 
// Divisor for visualizing the depth image (16->8 bits)
#define SCALE_DIVISOR 16
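// Typical use, as a sketch only ('depth8' is a hypothetical 8-bit destination
// Mat; the actual call lives in the .cpp):
//     rawData16.convertTo(depth8, CV_8UC1, 1.0 / SCALE_DIVISOR);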
 
// Vertical field-of-view range used to generate the horizontal map
#define VFOV_MIN 100
#define VFOV_MAX 140
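// One plausible reading, assuming these are row indices into the depth image
// (an interpretation, not stated in this header): the horizontal map is built from
//     cv::Mat band = rawData16.rowRange(VFOV_MIN, VFOV_MAX);
// with each column reduced to a single entry of depthHorizon.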
 
// Threshold for moving object recognition
#define FG_MASK_THRESHOLD 220
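// Presumed use (an assumption; the actual logic lives in the .cpp): pixels of
// the averaged mask at or above this value are kept as "moving", e.g.
//     cv::threshold(fgMaskAverage, fgMaskMOG, FG_MASK_THRESHOLD, 255, cv::THRESH_BINARY);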
 
// Parameters for the background subtractor
#define BACKGROUND_SUBTRACTOR_HISTORY 150
#define BACKGROUND_SUBTRACTOR_NMIXTURES 1
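// Sketch of how these map onto OpenCV 2.4's MOG background subtractor;
// the backgroundRatio value and the 'frame8' input are assumptions:
//     pMOG = new cv::BackgroundSubtractorMOG(BACKGROUND_SUBTRACTOR_HISTORY,
//                                            BACKGROUND_SUBTRACTOR_NMIXTURES,
//                                            0.7 /* backgroundRatio */);
//     (*pMOG)(frame8, fgMaskRaw);    // update model, produce foreground mask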
 
// Parameters for blob detection
#define BLOB_MIN_DISTANCE 30
#define BLOB_MIN_AREA 20
#define BLOB_MAX_AREA 9999
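// Sketch of the expected mapping onto cv::SimpleBlobDetector::Params
// (which filters are enabled, and the detector's input image, are assumptions):
//     params.filterByArea = true;
//     params.minArea = BLOB_MIN_AREA;
//     params.maxArea = BLOB_MAX_AREA;
//     params.minDistBetweenBlobs = BLOB_MIN_DISTANCE;
//     blobDetector = new cv::SimpleBlobDetector(params);    // OpenCV 2.4-style
//     blobDetector->detect(movementPointsMat, blobKeypoints);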
 
// Multiplier for velocity line indicator
#define VELOCITY_MULTIPLIER 5
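// i.e. the velocity indicator line is drawn roughly from a tracked object's
// position to position + velocity * VELOCITY_MULTIPLIER (a sketch of intent,
// not taken from this header).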
 
// Indicator colors
#define COLOR_DEPTH_FOV QColor(255, 255, 255, 100)
#define COLOR_DEPTH_FOV_FILL QColor(0, 0, 0, 100)
#define COLOR_DEPTH_POINT QColor(0, 0, 255, 255)
#define COLOR_DEPTH_BACKGROUND QColor(255, 0, 0, 10)
#define COLOR_FOV QColor(0, 0, 0, 100)
#define COLOR_MOVEMENT_ZONE QColor(255, 0, 0, 5)
#define COLOR_KEYPOINT QColor(255, 255, 0, 200)
#define COLOR_EST_POSITION QColor(0, 0, 255, 200)
#define COLOR_EST_AVERAGE QColor(0, 255, 0, 200)
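
/*
 * DepthProcessor
 *
 * QThread-based processor for 16-bit depth frames. Incoming frames
 * (processDepthData) are background-subtracted to detect moving objects,
 * condensed into a 1D horizontal depth/movement map, and rendered into
 * QImages that are published to the four GUI panes through the signals below.
 *
 * Typical wiring, as a sketch only: 'sensor', 'gui', and their signal/slot
 * names are assumptions, not part of this header.
 *
 *     DepthProcessor *proc = new DepthProcessor(this);
 *     connect(sensor, SIGNAL(newDepthFrame(cv::Mat)),
 *             proc, SLOT(processDepthData(cv::Mat)));
 *     connect(proc, SIGNAL(setImageTopLeft(QImage)),
 *             gui, SLOT(showTopLeft(QImage)));
 *
 * Note: delivering cv::Mat through a queued connection requires the type
 * to be registered with qRegisterMetaType.
 */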
 
class DepthProcessor : public QThread
{
    Q_OBJECT

public:
    DepthProcessor(QObject *parent = 0);
    ~DepthProcessor();

public slots:
    void setFOV(float width, float height);
    void setDisplayImage(const int, const QString &);
    void processDepthData(const cv::Mat &);

signals:
    void setImageTopLeft(const QImage &);
    void setImageTopRight(const QImage &);
    void setImageBotLeft(const QImage &);
    void setImageBotRight(const QImage &);
 
private:
    // Pointers to the images displayed on the GUI
    QImage *topLeftImage;
    QImage *topRightImage;
    QImage *botLeftImage;
    QImage *botRightImage;
    void updateImages();

    void drawDepthImages();
    void drawFOVImages();

    void drawDistanceFOV(QImage &, QVector<float> &);
    void drawSensorFOV(QImage &image);
    void drawMovementZones(QImage &, QVector<float> &);
    void drawKeyPoints(QImage &image, std::vector<cv::KeyPoint> &);
    void drawMovingObjects(QImage &, std::vector<MOVING_OBJECT> &);

    void convertMatToQImage3C(cv::Mat &, QImage &);
    void convertQImageToMat3C(QImage &, cv::Mat &);
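    // Sketch of the Mat -> QImage direction, assuming an 8-bit BGR input
    // ('mat' and 'image' name the unnamed parameters; the channel swap and
    // deep copy are assumptions about the intended behaviour):
    //     cv::Mat rgb;
    //     cv::cvtColor(mat, rgb, CV_BGR2RGB);
    //     image = QImage(rgb.data, rgb.cols, rgb.rows, (int)rgb.step,
    //                    QImage::Format_RGB888).copy();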
 
    float fovWidth;
    float fovHeight;

    cv::Mat rawData16;                          // Raw 16-bit sensor data
    QImage rawDepthImage;                       // Image of --^
    cv::Mat lastValidData16;                    // Last known valid 16-bit data
    QImage lastValidDepthImage;                 // Image of --^
    QImage lastValidProcessed;                  // Processed image with overlay
    cv::Mat fgMaskMOG;                          // Mask of detected movements
    QImage movementMaskImage;                   // Image of --^
    cv::Mat fgMaskTmp;                          // Temporary buffer for averaging mask values
    cv::Mat fgMaskRaw;                          // Raw values for movement mask
    QImage movementMaskRawImage;                // Image of --^
    cv::Mat fgMaskAverage;                      // Weighted average of mask values
    QImage movementMaskAverageImage;            // Image of --^
    cv::Ptr<cv::BackgroundSubtractor> pMOG;     // Background subtractor

    QImage rawHorizonImage;                     // 2D image showing depth points (raw)
    QImage lastValidHorizonImage;               // 2D image showing depth points
    QImage overlayHorizonImage;                 // 2D image with all overlays

    QVector<float> rawDepthHorizon;             // Depth points in inches (raw)
    QVector<float> depthHorizon;                // Depth points in inches

    QVector<int> movementMaskHorizon;           // 1D condensed movement mask

    cv::Mat movementPointsMat;                  // Image showing object movement zones
    QImage movementPointsImage;                 // Image of --^
    cv::SimpleBlobDetector::Params params;      // Parameters for blob detection
    cv::Ptr<cv::FeatureDetector> blobDetector;  // Blob detector
    std::vector<cv::KeyPoint> blobKeypoints;    // Keypoints from blob detector

    MovingPointTracker movementTracker;         // Movement tracker
    std::vector<MOVING_OBJECT> movementObjects; // Results from movement tracker
};
 
#endif // DEPTHPROCESSOR_H