From 54f83e153549dd1a63bcc8fa5e55fb171621a989 Mon Sep 17 00:00:00 2001
From: AlexeyAB <alexeyab84@gmail.com>
Date: Wed, 17 Jan 2018 18:05:07 +0000
Subject: [PATCH] Some fixes: make TRACK_OPTFLOW opt-in, read model paths from argv, sync detection with tracking
---
src/yolo_console_dll.cpp | 85 +++++++++++++++++++++++-------------------
1 file changed, 46 insertions(+), 39 deletions(-)
diff --git a/src/yolo_console_dll.cpp b/src/yolo_console_dll.cpp
index 0e291a4..a9ce6b9 100644
--- a/src/yolo_console_dll.cpp
+++ b/src/yolo_console_dll.cpp
@@ -11,10 +11,11 @@
#ifdef _WIN32
#define OPENCV
-#include "windows.h"
#endif
-#define TRACK_OPTFLOW
+// To enable tracking, uncomment the following line. Tracking is supported only with OpenCV 3.x
+//#define TRACK_OPTFLOW
+
#include "yolo_v2_class.hpp" // imported functions from DLL
#ifdef OPENCV
@@ -90,17 +91,27 @@
int main(int argc, char *argv[])
{
+ std::string names_file = "data/voc.names";
+ std::string cfg_file = "cfg/yolo-voc.cfg";
+ std::string weights_file = "yolo-voc.weights";
std::string filename;
- if (argc > 1) filename = argv[1];
- Detector detector("cfg/yolo-voc.cfg", "yolo-voc.weights");
- //Detector detector("tiny-yolo-voc_air.cfg", "backup/tiny-yolo-voc_air_5000.weights");
+ if (argc > 4) { // expects: voc.names yolo-voc.cfg yolo-voc.weights test.mp4
+ names_file = argv[1];
+ cfg_file = argv[2];
+ weights_file = argv[3];
+ filename = argv[4];
+ }
+ else if (argc > 1) filename = argv[1];
- auto obj_names = objects_names_from_file("data/voc.names");
+ Detector detector(cfg_file, weights_file);
+
+ auto obj_names = objects_names_from_file(names_file);
std::string out_videofile = "result.avi";
bool const save_output_videofile = false;
#ifdef TRACK_OPTFLOW
Tracker_optflow tracker_flow;
+ detector.wait_stream = true;
#endif
while (true)
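
With this hunk the demo takes the names, cfg, and weights files from the command line instead of hard-coding them. A hypothetical invocation (the binary name depends on how the project is built; yolo_console_dll is only an assumption) would be:

    yolo_console_dll data/voc.names cfg/yolo-voc.cfg yolo-voc.weights test.mp4

Passing a single argument keeps the default VOC model and only overrides the input filename.
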
@@ -131,7 +142,7 @@
int current_det_fps = 0, current_cap_fps = 0;
std::thread t_detect, t_cap, t_videowrite;
std::mutex mtx;
- std::condition_variable cv;
+ std::condition_variable cv_detected, cv_pre_tracked;
std::chrono::steady_clock::time_point steady_start, steady_end;
cv::VideoCapture cap(filename); cap >> cur_frame;
int const video_fps = cap.get(CV_CAP_PROP_FPS);
@@ -153,35 +164,29 @@
// swap result bounding-boxes and input-frame
if(consumed)
{
- {
- std::unique_lock<std::mutex> lock(mtx);
- det_image = detector.mat_to_image_resize(cur_frame);
- result_vec = thread_result_vec;
- result_vec = detector.tracking(result_vec); // comment it - if track_id is not required
- consumed = false;
-
+ std::unique_lock<std::mutex> lock(mtx);
+ det_image = detector.mat_to_image_resize(cur_frame);
+ result_vec = thread_result_vec;
+ result_vec = detector.tracking(result_vec); // comment this line out if track_id is not required
#ifdef TRACK_OPTFLOW
- // track optical flow
- if (track_optflow_queue.size() > 0) {
- std::queue<cv::Mat> new_track_optflow_queue;
- //std::cout << "\n !!!! all = " << track_optflow_queue.size() << ", cur = " << passed_flow_frames << std::endl;
- tracker_flow.update_tracking_flow(track_optflow_queue.front());
+ // track optical flow
+ if (track_optflow_queue.size() > 0) {
+ std::queue<cv::Mat> new_track_optflow_queue;
+ //std::cout << "\n !!!! all = " << track_optflow_queue.size() << ", cur = " << passed_flow_frames << std::endl;
+ tracker_flow.update_tracking_flow(track_optflow_queue.front());
+ while (track_optflow_queue.size() > 1) {
track_optflow_queue.pop();
- while (track_optflow_queue.size() > 0) {
- result_vec = tracker_flow.tracking_flow(track_optflow_queue.front(), result_vec);
- if (track_optflow_queue.size() <= passed_flow_frames && new_track_optflow_queue.size() == 0)
- new_track_optflow_queue = track_optflow_queue;
-
- track_optflow_queue.pop();
- }
- track_optflow_queue = new_track_optflow_queue;
- new_track_optflow_queue.swap(std::queue<cv::Mat>());
- passed_flow_frames = 0;
+ result_vec = tracker_flow.tracking_flow(track_optflow_queue.front(), result_vec);
+ if (track_optflow_queue.size() <= passed_flow_frames && new_track_optflow_queue.size() == 0)
+ new_track_optflow_queue = track_optflow_queue;
}
+ track_optflow_queue = new_track_optflow_queue;
+ passed_flow_frames = 0;
}
#endif
-
+ consumed = false;
+ cv_pre_tracked.notify_all();
}
// launch thread once - Detection
if (!t_detect.joinable()) {
@@ -189,15 +194,15 @@
auto current_image = det_image;
consumed = true;
while (current_image.use_count() > 0) {
- auto result = detector.detect_resized(*current_image, frame_size, 0.24, false); // true
- //Sleep(200);
- Sleep(50);
+ auto result = detector.detect_resized(*current_image, frame_size, 0.20, false); // true
++fps_det_counter;
std::unique_lock<std::mutex> lock(mtx);
thread_result_vec = result;
current_image = det_image;
consumed = true;
- cv.notify_all();
+ cv_detected.notify_all();
+ if(detector.wait_stream)
+ while (consumed) cv_pre_tracked.wait(lock);
}
});
}
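
The single condition_variable cv from the old code is split into cv_detected and cv_pre_tracked so that, when detector.wait_stream is set, the detection thread and the main capture/tracking loop hand results back and forth in lock-step instead of the detector free-running. A minimal, self-contained sketch of that handshake (an illustration only, not code from the repository; a plain int stands in for the detection result and the iteration count is arbitrary):

    #include <condition_variable>
    #include <iostream>
    #include <mutex>
    #include <thread>

    int main() {
        std::mutex mtx;
        std::condition_variable cv_detected, cv_pre_tracked;
        bool consumed = false;   // set by the detector once it has taken the input and published a result
        bool exit_flag = false;
        int thread_result = 0;

        std::thread t_detect([&] {
            for (int i = 1; i <= 5; ++i) {
                int result = i * i;                       // stand-in for detect_resized()
                std::unique_lock<std::mutex> lock(mtx);
                thread_result = result;
                consumed = true;                          // publish the result
                cv_detected.notify_all();
                // wait_stream behaviour: block until the main loop has taken it
                cv_pre_tracked.wait(lock, [&] { return !consumed; });
            }
            std::lock_guard<std::mutex> lock(mtx);
            exit_flag = true;
            cv_detected.notify_all();
        });

        while (true) {
            std::unique_lock<std::mutex> lock(mtx);
            cv_detected.wait(lock, [&] { return consumed || exit_flag; });
            if (exit_flag) break;
            std::cout << "got result " << thread_result << "\n"; // stand-in for tracking/drawing
            consumed = false;                                     // let the detector run again
            cv_pre_tracked.notify_all();
        }
        t_detect.join();
        return 0;
    }
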
@@ -218,7 +223,7 @@
result_vec = tracker_flow.tracking_flow(cur_frame, result_vec); // track optical flow
#endif
- draw_boxes(cur_frame, result_vec, obj_names, 3, current_det_fps, current_cap_fps);
+ draw_boxes(cur_frame, result_vec, obj_names, 3, current_det_fps, current_cap_fps); // 3 or 16ms
//show_console_result(result_vec, obj_names);
if (output_video.isOpened() && videowrite_ready) {
@@ -231,11 +236,13 @@
}
}
+#ifndef TRACK_OPTFLOW
// wait for the detection result - for video-file only (not for net-cam)
- //if (protocol != "rtsp://" && protocol != "http://" && protocol != "https:/") {
- // std::unique_lock<std::mutex> lock(mtx);
- // while (!consumed) cv.wait(lock);
- //}
+ if (protocol != "rtsp://" && protocol != "http://" && protocol != "https:/") {
+ std::unique_lock<std::mutex> lock(mtx);
+ while (!consumed) cv_detected.wait(lock);
+ }
+#endif
}
if (t_cap.joinable()) t_cap.join();
if (t_detect.joinable()) t_detect.join();
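
The protocol comparison re-enabled in the last hunk relies on how protocol is built earlier in the unchanged part of the file; presumably it is the first seven characters of the filename, which is why the third literal is the seven-character "https:/" rather than "https://", roughly:

    std::string const protocol = filename.substr(0, 7);    // e.g. "rtsp://", "http://", "https:/"

so the wait-for-detection block only runs for local video files, not for network streams.
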
--
Gitblit v1.10.0