| 287 |
Kevin |
1 |
#include "DepthProcessor.h"
|
|
|
2 |
|
|
|
3 |
// Limit the processing buffer to X frames.
// Only the release() side is visible in this file (end of processDepthData());
// presumably the frame producer acquire()s before queuing a frame, capping the
// number of in-flight frames at 3 — confirm against the producer's code.
QSemaphore processorBuffer(3);
|
|
|
5 |
|
|
|
6 |
DepthProcessor::DepthProcessor(QObject *parent)
    : QThread(parent) {

    // NOTE(review): calling moveToThread(this) on a QThread subclass is a
    // well-known Qt anti-pattern (the thread object then "lives" on the thread
    // it manages). Confirm the run()/slot design really depends on it before
    // changing.
    moveToThread(this);

    // Display-pane selections start unset; setDisplayImage() assigns them.
    topLeftImage = NULL;
    topRightImage = NULL;
    botLeftImage = NULL;
    botRightImage = NULL;

    // Depth buffers and their ARGB visualizations (all X_RES x Y_RES).
    rawDepthImage = QImage(X_RES, Y_RES, QImage::Format_ARGB32);
    lastValidData16 = cv::Mat(Y_RES, X_RES, CV_16UC1, cv::Scalar(0));
    lastValidDepthImage = QImage(X_RES, Y_RES, QImage::Format_ARGB32);
    lastValidProcessed = QImage(X_RES, Y_RES, QImage::Format_ARGB32);

    // Foreground-movement masks (MOG2 output, raw copy, float accumulator,
    // time-averaged) plus their greyscale display images.
    fgMaskMOG = cv::Mat(Y_RES, X_RES, CV_8UC1);
    movementMaskImage = QImage(X_RES, Y_RES, QImage::Format_ARGB32);
    fgMaskTmp = cv::Mat(Y_RES, X_RES, CV_32FC1);
    fgMaskRaw = cv::Mat(Y_RES, X_RES, CV_8UC1);
    movementMaskRawImage = QImage(X_RES, Y_RES, QImage::Format_ARGB32);
    fgMaskAverage = cv::Mat(Y_RES, X_RES, CV_8UC1);
    movementMaskAverageImage = QImage(X_RES, Y_RES, QImage::Format_ARGB32);
    // NOTE(review): allocated with `new` and never deleted in ~DepthProcessor()
    // — leaks unless pMOG is declared as cv::Ptr in the header; verify.
    pMOG = new cv::BackgroundSubtractorMOG2(BACKGROUND_SUBTRACTOR_HISTORY, BACKGROUND_SUBTRACTOR_NMIXTURES, false);

    // Top-down "horizon" (arc-projected) visualizations.
    rawHorizonImage = QImage(X_RES, Y_RES, QImage::Format_ARGB32);
    lastValidHorizonImage = QImage(X_RES, Y_RES, QImage::Format_ARGB32);
    overlayHorizonImage = QImage(X_RES, Y_RES, QImage::Format_ARGB32);

    // Per-column distance profiles (one entry per image column, inches).
    depthHorizon = QVector<float>(X_RES);
    rawDepthHorizon = QVector<float>(X_RES);

    // Per-column movement flag (0/1), filled in processDepthData().
    movementMaskHorizon = QVector<int>(X_RES);

    // Movement map used as input to blob detection (QImage for drawing,
    // 3-channel Mat for the detector).
    movementPointsImage = QImage(X_RES, Y_RES, QImage::Format_ARGB32);
    movementPointsMat = cv::Mat(Y_RES, X_RES, CV_8UC3);
    //params.thresholdStep = 20;
    //params.minThreshold = 50;
    //params.minThreshold = 500;
    params.minDistBetweenBlobs = BLOB_MIN_DISTANCE;
    params.minArea = BLOB_MIN_AREA;
    params.maxArea = BLOB_MAX_AREA;
    // Filter blobs by area only; all other filters disabled.
    params.filterByColor = false;
    params.filterByCircularity = false;
    params.filterByConvexity = false;
    params.filterByInertia = false;
    params.filterByArea = true;
    // NOTE(review): also allocated with `new`, never deleted — see pMOG note.
    blobDetector = new cv::SimpleBlobDetector(params);
}
|
|
|
54 |
|
|
|
55 |
DepthProcessor::~DepthProcessor() {
    // NOTE(review): pMOG and blobDetector are created with `new` in the
    // constructor but never released here. If the header declares them as raw
    // pointers this leaks; if they are cv::Ptr, adding `delete` here would
    // double-free. Check DepthProcessor.h before fixing.
}
|
|
|
58 |
|
|
|
59 |
/**
 * Record the sensor's field-of-view angles (radians) for later arc projections.
 */
void DepthProcessor::setFOV(float width, float height) {
    fovHeight = height;
    fovWidth = width;
}
|
|
|
63 |
|
|
|
64 |
/**
|
|
|
65 |
* Change the image to display on the main GUI
|
|
|
66 |
*/
|
|
|
67 |
void DepthProcessor::setDisplayImage(const int pane, const QString &image) {
|
|
|
68 |
switch (pane) {
|
|
|
69 |
case 0:
|
|
|
70 |
if (image == "Raw Depth")
|
|
|
71 |
topLeftImage = &rawDepthImage;
|
|
|
72 |
else if (image == "Last Valid Depth")
|
|
|
73 |
topLeftImage = &lastValidDepthImage;
|
|
|
74 |
else if (image == "Movement Mask Raw Depth")
|
|
|
75 |
topLeftImage = &movementMaskRawImage;
|
|
|
76 |
else if (image == "Movement Mask Average Depth")
|
|
|
77 |
topLeftImage = &movementMaskAverageImage;
|
|
|
78 |
else if (image == "Processed Depth")
|
|
|
79 |
topLeftImage = &lastValidProcessed;
|
|
|
80 |
else if (image == "Raw Depth Horizon")
|
|
|
81 |
topLeftImage = &rawHorizonImage;
|
|
|
82 |
else if (image == "Last Valid Horizon")
|
|
|
83 |
topLeftImage = &lastValidHorizonImage;
|
|
|
84 |
else if (image == "Overlay Horizon")
|
|
|
85 |
topLeftImage = &overlayHorizonImage;
|
|
|
86 |
else if (image == "Movement Map")
|
|
|
87 |
topLeftImage = &movementPointsImage;
|
|
|
88 |
break;
|
|
|
89 |
case 1:
|
|
|
90 |
if (image == "Raw Depth")
|
|
|
91 |
topRightImage = &rawDepthImage;
|
|
|
92 |
else if (image == "Last Valid Depth")
|
|
|
93 |
topRightImage = &lastValidDepthImage;
|
|
|
94 |
else if (image == "Movement Mask Raw Depth")
|
|
|
95 |
topRightImage = &movementMaskRawImage;
|
|
|
96 |
else if (image == "Movement Mask Average Depth")
|
|
|
97 |
topRightImage = &movementMaskAverageImage;
|
|
|
98 |
else if (image == "Processed Depth")
|
|
|
99 |
topRightImage = &lastValidProcessed;
|
|
|
100 |
else if (image == "Raw Depth Horizon")
|
|
|
101 |
topRightImage = &rawHorizonImage;
|
|
|
102 |
else if (image == "Last Valid Horizon")
|
|
|
103 |
topRightImage = &lastValidHorizonImage;
|
|
|
104 |
else if (image == "Overlay Horizon")
|
|
|
105 |
topRightImage = &overlayHorizonImage;
|
|
|
106 |
else if (image == "Movement Map")
|
|
|
107 |
topRightImage = &movementPointsImage;
|
|
|
108 |
break;
|
|
|
109 |
case 2:
|
|
|
110 |
if (image == "Raw Depth")
|
|
|
111 |
botLeftImage = &rawDepthImage;
|
|
|
112 |
else if (image == "Last Valid Depth")
|
|
|
113 |
botLeftImage = &lastValidDepthImage;
|
|
|
114 |
else if (image == "Movement Mask Raw Depth")
|
|
|
115 |
botLeftImage = &movementMaskRawImage;
|
|
|
116 |
else if (image == "Movement Mask Average Depth")
|
|
|
117 |
botLeftImage = &movementMaskAverageImage;
|
|
|
118 |
else if (image == "Processed Depth")
|
|
|
119 |
botLeftImage = &lastValidProcessed;
|
|
|
120 |
else if (image == "Raw Depth Horizon")
|
|
|
121 |
botLeftImage = &rawHorizonImage;
|
|
|
122 |
else if (image == "Last Valid Horizon")
|
|
|
123 |
botLeftImage = &lastValidHorizonImage;
|
|
|
124 |
else if (image == "Overlay Horizon")
|
|
|
125 |
botLeftImage = &overlayHorizonImage;
|
|
|
126 |
else if (image == "Movement Map")
|
|
|
127 |
botLeftImage = &movementPointsImage;
|
|
|
128 |
break;
|
|
|
129 |
case 3:
|
|
|
130 |
if (image == "Raw Depth")
|
|
|
131 |
botRightImage = &rawDepthImage;
|
|
|
132 |
else if (image == "Last Valid Depth")
|
|
|
133 |
botRightImage = &lastValidDepthImage;
|
|
|
134 |
else if (image == "Movement Mask Raw Depth")
|
|
|
135 |
botRightImage = &movementMaskRawImage;
|
|
|
136 |
else if (image == "Movement Mask Average Depth")
|
|
|
137 |
botRightImage = &movementMaskAverageImage;
|
|
|
138 |
else if (image == "Processed Depth")
|
|
|
139 |
botRightImage = &lastValidProcessed;
|
|
|
140 |
else if (image == "Raw Depth Horizon")
|
|
|
141 |
botRightImage = &rawHorizonImage;
|
|
|
142 |
else if (image == "Last Valid Horizon")
|
|
|
143 |
botRightImage = &lastValidHorizonImage;
|
|
|
144 |
else if (image == "Overlay Horizon")
|
|
|
145 |
botRightImage = &overlayHorizonImage;
|
|
|
146 |
else if (image == "Movement Map")
|
|
|
147 |
botRightImage = &movementPointsImage;
|
|
|
148 |
break;
|
|
|
149 |
default:
|
|
|
150 |
break;
|
|
|
151 |
}
|
|
|
152 |
}
|
|
|
153 |
|
|
|
154 |
/**
|
|
|
155 |
* Updates the main GUI
|
|
|
156 |
*/
|
|
|
157 |
void DepthProcessor::updateImages() {
|
|
|
158 |
emit setImageTopLeft(*topLeftImage);
|
|
|
159 |
emit setImageTopRight(*topRightImage);
|
|
|
160 |
emit setImageBotLeft(*botLeftImage);
|
|
|
161 |
emit setImageBotRight(*botRightImage);
|
|
|
162 |
}
|
|
|
163 |
|
|
|
164 |
/**
|
|
|
165 |
* Here we process the raw data from the sensor
|
|
|
166 |
*/
|
|
|
167 |
void DepthProcessor::processDepthData(const cv::Mat &data) {
|
|
|
168 |
// The 16-bit raw image is passed in via a pointer
|
|
|
169 |
rawData16 = data;
|
|
|
170 |
|
|
|
171 |
// Save a pixel as valid data if it is != 0
|
|
|
172 |
for (int y = 0; y < Y_RES; y++) {
|
|
|
173 |
for (int x = 0; x < X_RES; x++) {
|
|
|
174 |
if (rawData16.ptr<ushort>(y)[x] != 0) {
|
|
|
175 |
lastValidData16.ptr<ushort>(y)[x] = rawData16.ptr<ushort>(y)[x];
|
|
|
176 |
}
|
|
|
177 |
}
|
|
|
178 |
}
|
|
|
179 |
|
|
|
180 |
// Apply a 5-pixel wide median filter to the data for noise removal
|
|
|
181 |
//cv::medianBlur(lastValidData16, lastValidData16, 5);
|
|
|
182 |
|
|
|
183 |
// Execute a background subtraction to obtain moving objects
|
|
|
184 |
pMOG->operator()(lastValidData16, fgMaskMOG, -1);
|
|
|
185 |
|
|
|
186 |
fgMaskMOG.copyTo(fgMaskRaw);
|
|
|
187 |
|
|
|
188 |
// Erode then dilate the mask to remove noise
|
|
|
189 |
//cv::erode(fgMaskMOG, fgMaskMOG, cv::Mat());
|
|
|
190 |
//cv::dilate(fgMaskMOG, fgMaskMOG, cv::Mat());
|
|
|
191 |
// Alternative:
|
|
|
192 |
int kernelSize = 9;
|
|
|
193 |
cv::Mat kernel = cv::getStructuringElement(cv::MORPH_ELLIPSE, cv::Size(kernelSize, kernelSize));
|
|
|
194 |
// Morphological opening (remove small objects from foreground)
|
|
|
195 |
cv::morphologyEx(fgMaskMOG, fgMaskMOG, cv::MORPH_OPEN, kernel);
|
|
|
196 |
// Morphological closing (fill small holes in the foreground)
|
|
|
197 |
cv::morphologyEx(fgMaskMOG, fgMaskMOG, cv::MORPH_CLOSE, kernel);
|
|
|
198 |
|
|
|
199 |
// Average the moving mask's values and shrink it by a bit to remove edges
|
|
|
200 |
cv::accumulateWeighted(fgMaskMOG, fgMaskTmp, 0.5);
|
|
|
201 |
cv::convertScaleAbs(fgMaskTmp, fgMaskAverage);
|
|
|
202 |
cv::erode(fgMaskAverage, fgMaskAverage, kernel);
|
|
|
203 |
|
|
|
204 |
// Get the closest distance in the specified range that is not 0 and convert to inches
|
|
|
205 |
for (int x = 0; x < X_RES; x++) {
|
|
|
206 |
ushort min = 9999;
|
|
|
207 |
ushort rawMin = 9999;
|
|
|
208 |
for (int y = VFOV_MIN; y < VFOV_MAX; y++) {
|
|
|
209 |
if (lastValidData16.ptr<ushort>(y)[x] != 0)
|
|
|
210 |
min = qMin(min, lastValidData16.ptr<ushort>(y)[x]);
|
|
|
211 |
rawMin = qMin(rawMin, rawData16.ptr<ushort>(y)[x]);
|
|
|
212 |
}
|
|
|
213 |
|
|
|
214 |
// Convert the raw distance values to distance in inches
|
|
|
215 |
// Distance (inches) = (raw distance - 13.072) / 25.089;
|
|
|
216 |
depthHorizon[x] = (min - 13.072) / 25.089;
|
|
|
217 |
rawDepthHorizon[x] = (rawMin - 13.072) / 25.089;
|
|
|
218 |
}
|
|
|
219 |
|
|
|
220 |
// Mark the points of detected movements in the movement mask if the threshold is exceeded
|
|
|
221 |
for (int x = 0; x < X_RES; x++) {
|
|
|
222 |
int moved = 0;
|
|
|
223 |
for (int y = VFOV_MIN; y < VFOV_MAX; y++) {
|
|
|
224 |
if (fgMaskAverage.ptr<uchar>(y)[x] >= FG_MASK_THRESHOLD)
|
|
|
225 |
moved = 1;
|
|
|
226 |
}
|
|
|
227 |
movementMaskHorizon[x] = moved;
|
|
|
228 |
}
|
|
|
229 |
|
|
|
230 |
// Draw all images
|
|
|
231 |
drawDepthImages();
|
|
|
232 |
drawFOVImages();
|
|
|
233 |
|
|
|
234 |
// Update GUI with selected image
|
|
|
235 |
updateImages();
|
|
|
236 |
|
|
|
237 |
processorBuffer.release(1);
|
|
|
238 |
}
|
|
|
239 |
|
|
|
240 |
/**
 * Generate a visualization of the depth data.
 *
 * Renders four pixel-mapped views (raw depth, hole-filled depth, processed
 * depth with translucent static background, and the two movement masks), then
 * overlays the vertical-FOV boundary lines on the depth views.
 */
void DepthProcessor::drawDepthImages() {
    // Convert raw data to images to be displayed
    for (int y = 0; y < Y_RES; ++y) {
        for (int x = 0; x < X_RES; ++x) {
            // rawDepthImage: map the 16-bit depth onto an RGB gradient — red
            // uses half the divisor and blue double it, so nearer pixels read
            // as redder, farther as bluer.
            rawDepthImage.setPixel(x, y, qRgb(rawData16.ptr<ushort>(y)[x] / (SCALE_DIVISOR / 2),
                rawData16.ptr<ushort>(y)[x] / SCALE_DIVISOR, rawData16.ptr<ushort>(y)[x] / (SCALE_DIVISOR * 2)));

            // lastValidDepthImage: same gradient on the hole-filled data
            lastValidDepthImage.setPixel(x, y, qRgb(lastValidData16.ptr<ushort>(y)[x] / (SCALE_DIVISOR / 2),
                lastValidData16.ptr<ushort>(y)[x] / SCALE_DIVISOR, lastValidData16.ptr<ushort>(y)[x] / (SCALE_DIVISOR * 2)));

            // lastValidProcessed: static background (mask == 0) is drawn
            // semi-transparent (alpha 150); moving pixels stay fully opaque.
            if (fgMaskMOG.ptr<uchar>(y)[x] == 0) {
                lastValidProcessed.setPixel(x, y, qRgba(lastValidData16.ptr<ushort>(y)[x] / (SCALE_DIVISOR / 2),
                    lastValidData16.ptr<ushort>(y)[x] / SCALE_DIVISOR, lastValidData16.ptr<ushort>(y)[x] / (SCALE_DIVISOR * 2), 150));
            } else {
                lastValidProcessed.setPixel(x, y, qRgb(lastValidData16.ptr<ushort>(y)[x] / (SCALE_DIVISOR / 2),
                    lastValidData16.ptr<ushort>(y)[x] / SCALE_DIVISOR, lastValidData16.ptr<ushort>(y)[x] / (SCALE_DIVISOR * 2)));
            }

            // movementMaskImage (greyscale view of the raw MOG2 mask)
            movementMaskRawImage.setPixel(x, y, qRgb(fgMaskRaw.ptr<uchar>(y)[x], fgMaskRaw.ptr<uchar>(y)[x], fgMaskRaw.ptr<uchar>(y)[x]));

            // movementMaskAverageImage (greyscale view of the averaged mask)
            movementMaskAverageImage.setPixel(x, y, qRgb(fgMaskAverage.ptr<uchar>(y)[x], fgMaskAverage.ptr<uchar>(y)[x], fgMaskAverage.ptr<uchar>(y)[x]));
        }
    }

    // Draw lines indicating the FOV zones
    QPainter imagePainter;

    imagePainter.begin(&rawDepthImage);
    imagePainter.setPen(QPen(COLOR_DEPTH_FOV, 1));
    imagePainter.drawLine(0, VFOV_MIN, X_RES, VFOV_MIN);
    imagePainter.drawLine(0, VFOV_MAX, X_RES, VFOV_MAX);
    imagePainter.end();

    imagePainter.begin(&lastValidDepthImage);
    imagePainter.setPen(QPen(COLOR_DEPTH_FOV, 1));
    imagePainter.drawLine(0, VFOV_MIN, X_RES, VFOV_MIN);
    imagePainter.drawLine(0, VFOV_MAX, X_RES, VFOV_MAX);
    imagePainter.end();

    // On the processed view, additionally shade the bands outside the
    // vertical FOV with the fill brush.
    // NOTE(review): the drawRect calls pass X_RES / Y_RES as width/height,
    // which extends past the image edge; QPainter clips to the device, so the
    // rendered result is unaffected — but tightening the sizes would be clearer.
    imagePainter.begin(&lastValidProcessed);
    imagePainter.setPen(QPen(COLOR_DEPTH_FOV, 1));
    imagePainter.setBrush(QBrush(COLOR_DEPTH_FOV_FILL));
    imagePainter.drawLine(0, VFOV_MIN, X_RES, VFOV_MIN);
    imagePainter.drawLine(0, VFOV_MAX, X_RES, VFOV_MAX);
    imagePainter.drawRect(0, 0, X_RES, VFOV_MIN);
    imagePainter.drawRect(0, VFOV_MAX, X_RES, Y_RES);
    imagePainter.end();
}
|
|
|
296 |
|
|
|
297 |
/**
|
|
|
298 |
* Draws the given vector of points onto an image (projected across an arc)
|
|
|
299 |
*/
|
|
|
300 |
void DepthProcessor::drawDistanceFOV(QImage &image, QVector<float> &data) {
|
|
|
301 |
QPainter painter;
|
|
|
302 |
painter.begin(&image);
|
|
|
303 |
|
|
|
304 |
// Draw the FOV for the raw data
|
|
|
305 |
painter.translate(X_RES / 2, Y_RES);
|
|
|
306 |
// Rotate the canvas, draw all distances, then restore original coordinates
|
|
|
307 |
painter.rotate(-90 - qRadiansToDegrees(fovWidth / 2));
|
|
|
308 |
for (int x = 0; x < X_RES; x++) {
|
|
|
309 |
painter.rotate(qRadiansToDegrees(fovWidth / X_RES));
|
|
|
310 |
painter.setPen(QPen(COLOR_DEPTH_POINT, 2, Qt::SolidLine, Qt::RoundCap, Qt::RoundJoin));
|
|
|
311 |
painter.drawPoint(data[x], 0);
|
|
|
312 |
painter.setPen(QPen(COLOR_DEPTH_BACKGROUND, 2, Qt::SolidLine, Qt::RoundCap, Qt::RoundJoin));
|
|
|
313 |
painter.drawLine(QPoint(data[x], 0), QPoint(400, 0));
|
|
|
314 |
}
|
|
|
315 |
|
|
|
316 |
painter.end();
|
|
|
317 |
}
|
|
|
318 |
|
|
|
319 |
/**
|
|
|
320 |
* Draws the sensor's FOV onto the image
|
|
|
321 |
*/
|
|
|
322 |
void DepthProcessor::drawSensorFOV(QImage &image) {
|
|
|
323 |
QPainter painter;
|
|
|
324 |
painter.begin(&image);
|
|
|
325 |
|
|
|
326 |
// Draw the sensor's FOV
|
|
|
327 |
painter.translate(X_RES / 2, Y_RES);
|
|
|
328 |
painter.setPen(QPen(COLOR_FOV, 2, Qt::DashLine));
|
|
|
329 |
painter.rotate(-90 - qRadiansToDegrees(fovWidth / 2));
|
|
|
330 |
painter.drawLine(0, 0, X_RES, 0);
|
|
|
331 |
painter.rotate(qRadiansToDegrees(fovWidth));
|
|
|
332 |
painter.drawLine(0, 0, X_RES, 0);
|
|
|
333 |
|
|
|
334 |
painter.end();
|
|
|
335 |
}
|
|
|
336 |
|
|
|
337 |
/**
|
|
|
338 |
* Generate a horizontal visualization of the depth data
|
|
|
339 |
*/
|
|
|
340 |
void DepthProcessor::drawFOVImages() {
|
|
|
341 |
// Draw the raw FOV
|
|
|
342 |
rawHorizonImage.fill(Qt::white);
|
|
|
343 |
drawDistanceFOV(rawHorizonImage, rawDepthHorizon);
|
|
|
344 |
drawSensorFOV(rawHorizonImage);
|
|
|
345 |
|
|
|
346 |
// Draw the last valid data FOV
|
|
|
347 |
lastValidHorizonImage.fill(Qt::white);
|
|
|
348 |
drawDistanceFOV(lastValidHorizonImage, depthHorizon);
|
|
|
349 |
drawSensorFOV(lastValidHorizonImage);
|
|
|
350 |
|
|
|
351 |
// Draw only the movement points along with results of blob detection
|
|
|
352 |
movementPointsImage.fill(Qt::white);
|
|
|
353 |
drawMovementZones(movementPointsImage, depthHorizon);
|
|
|
354 |
convertQImageToMat3C(movementPointsImage, movementPointsMat);
|
|
|
355 |
blobDetector->detect(movementPointsMat, blobKeypoints);
|
|
|
356 |
std::vector<cv::Point2f> points;
|
|
|
357 |
for (int i = 0; i < blobKeypoints.size(); i++) {
|
|
|
358 |
points.push_back(cv::Point2f(blobKeypoints[i].pt.x, blobKeypoints[i].pt.y));
|
|
|
359 |
}
|
|
|
360 |
movementObjects = movementTracker.update(points);
|
|
|
361 |
drawKeyPoints(movementPointsImage, blobKeypoints);
|
|
|
362 |
drawMovingObjects(movementPointsImage, movementObjects);
|
|
|
363 |
drawSensorFOV(movementPointsImage);
|
|
|
364 |
|
|
|
365 |
// Draw the overlay of movements onto static objects
|
|
|
366 |
overlayHorizonImage.fill(Qt::white);
|
|
|
367 |
drawDistanceFOV(overlayHorizonImage, depthHorizon);
|
|
|
368 |
drawMovementZones(overlayHorizonImage, depthHorizon);
|
|
|
369 |
drawKeyPoints(overlayHorizonImage, blobKeypoints);
|
|
|
370 |
drawMovingObjects(overlayHorizonImage, movementObjects);
|
|
|
371 |
drawSensorFOV(overlayHorizonImage);
|
|
|
372 |
}
|
|
|
373 |
|
|
|
374 |
/**
|
|
|
375 |
* Draws the zones of detected movement on the map
|
|
|
376 |
*/
|
|
|
377 |
void DepthProcessor::drawMovementZones(QImage &image, QVector<float> &data) {
|
|
|
378 |
QPainter painter;
|
|
|
379 |
painter.begin(&image);
|
|
|
380 |
|
|
|
381 |
// Draw the FOV for the raw data
|
|
|
382 |
painter.translate(X_RES / 2, Y_RES);
|
|
|
383 |
// Rotate the canvas, draw all distances, then restore original coordinates
|
|
|
384 |
painter.rotate(-90 - qRadiansToDegrees(fovWidth / 2));
|
|
|
385 |
painter.setPen(QPen(COLOR_MOVEMENT_ZONE, 20, Qt::SolidLine, Qt::RoundCap, Qt::RoundJoin));
|
|
|
386 |
for (int x = 0; x < X_RES; x++) {
|
|
|
387 |
painter.rotate(qRadiansToDegrees(fovWidth / X_RES));
|
|
|
388 |
if (movementMaskHorizon[x] == 1)
|
|
|
389 |
painter.drawPoint(data[x], 0);
|
|
|
390 |
}
|
|
|
391 |
|
|
|
392 |
painter.end();
|
|
|
393 |
}
|
|
|
394 |
|
|
|
395 |
/**
|
|
|
396 |
* Draws the set of keypoints on the image
|
|
|
397 |
*/
|
|
|
398 |
void DepthProcessor::drawKeyPoints(QImage &image, std::vector<cv::KeyPoint> &points) {
|
|
|
399 |
QPainter painter;
|
|
|
400 |
painter.begin(&image);
|
|
|
401 |
|
|
|
402 |
painter.setPen(QPen(COLOR_KEYPOINT, 6, Qt::SolidLine, Qt::RoundCap, Qt::RoundJoin));
|
|
|
403 |
for (int i = 0; i < points.size(); i++) {
|
|
|
404 |
painter.drawPoint(points[i].pt.x, points[i].pt.y);
|
|
|
405 |
}
|
|
|
406 |
|
|
|
407 |
painter.end();
|
|
|
408 |
}
|
|
|
409 |
|
|
|
410 |
/**
|
|
|
411 |
* Draws the moving objects along with its ID, velocity, and angle of predicted movement
|
|
|
412 |
*/
|
|
|
413 |
void DepthProcessor::drawMovingObjects(QImage &image, std::vector<MOVING_OBJECT> &objects) {
|
|
|
414 |
QPainter painter;
|
|
|
415 |
painter.begin(&image);
|
|
|
416 |
|
|
|
417 |
for (int i = 0; i < objects.size(); i++) {
|
|
|
418 |
QPoint initPoint = QPoint(objects[i].predicted_pt.x, objects[i].predicted_pt.y);
|
|
|
419 |
// Calculate the line to draw to indicate object movement velocity and angle
|
|
|
420 |
float velocity_x = initPoint.x() + (objects[i].velocity * cos(objects[i].angle)) * VELOCITY_MULTIPLIER;
|
|
|
421 |
float velocity_y = initPoint.y() + (objects[i].velocity * sin(objects[i].angle)) * VELOCITY_MULTIPLIER;
|
|
|
422 |
QPointF predPoint = QPointF(velocity_x, velocity_y);
|
|
|
423 |
// Draw the object's estimated position
|
|
|
424 |
painter.setPen(QPen(COLOR_EST_POSITION, 6, Qt::SolidLine, Qt::RoundCap, Qt::RoundJoin));
|
|
|
425 |
painter.drawPoint(initPoint);
|
|
|
426 |
// Draw the object's ID
|
|
|
427 |
painter.drawText(initPoint.x() + 3, initPoint.y() - 3, QString::number(objects[i].ID));
|
|
|
428 |
// Draw the line indicating object's movement velocity and angle
|
|
|
429 |
painter.setPen(QPen(COLOR_EST_POSITION, 2, Qt::SolidLine, Qt::RoundCap, Qt::RoundJoin));
|
|
|
430 |
painter.drawLine(initPoint, predPoint);
|
|
|
431 |
// Draw the object's running average
|
|
|
432 |
painter.setPen(QPen(COLOR_EST_AVGERAGE, 6, Qt::SolidLine, Qt::RoundCap, Qt::RoundJoin));
|
|
|
433 |
painter.drawPoint(objects[i].historyAvg.x, objects[i].historyAvg.y);
|
|
|
434 |
}
|
|
|
435 |
|
|
|
436 |
painter.end();
|
|
|
437 |
}
|
|
|
438 |
|
|
|
439 |
/**
 * Convert a 3-channel (CV_8UC3, interleaved B,G,R) cv::Mat into the QImage.
 *
 * BUG FIX: the previous version indexed the row with ptr[x], which reads raw
 * bytes rather than pixels — for a 3-channel Mat that mixes up channels and
 * only ever touches the first third of each row. A 3-channel row stores pixel
 * x at bytes [3*x .. 3*x+2], matching the Vec3b layout used by the inverse
 * function convertQImageToMat3C().
 * NOTE(review): assumes the Mat really is CV_8UC3, as the "3C" name and the
 * inverse function imply — confirm against the callers.
 */
void DepthProcessor::convertMatToQImage3C(cv::Mat &mat, QImage &image) {
    const uchar *ptr;
    for (int y = 0; y < Y_RES; y++) {
        ptr = mat.ptr<uchar>(y);
        for (int x = 0; x < X_RES; x++) {
            // OpenCV is B,G,R; qRgb takes R,G,B.
            image.setPixel(x, y, qRgb(ptr[3 * x + 2], ptr[3 * x + 1], ptr[3 * x]));
        }
    }
}
|
|
|
448 |
|
|
|
449 |
/**
 * Copy an ARGB QImage into a 3-channel (CV_8UC3, interleaved B,G,R) cv::Mat.
 *
 * Uses the qRed/qGreen/qBlue helpers directly on the QRgb value instead of
 * constructing a QColor per pixel — identical channel values, no per-pixel
 * object construction.
 */
void DepthProcessor::convertQImageToMat3C(QImage &image, cv::Mat &mat) {
    for (int y = 0; y < Y_RES; y++) {
        for (int x = 0; x < X_RES; x++) {
            const QRgb pix = image.pixel(x, y);
            cv::Vec3b &pixel = mat.at<cv::Vec3b>(y, x);
            // OpenCV channel order is B,G,R.
            pixel.val[0] = qBlue(pix);
            pixel.val[1] = qGreen(pix);
            pixel.val[2] = qRed(pix);
        }
    }
}
|