| Line | Branch | Exec | Source |
|---|---|---|---|
| 1 | #ifdef YODAU_OPENCV | ||
| 2 | |||
| 3 | #include "opencv_client.hpp" | ||
| 4 | |||
| 5 | #include <charconv> | ||
| 6 | #include <filesystem> | ||
| 7 | |||
| 8 | #ifdef __linux__ | ||
| 9 | #include <fcntl.h> | ||
| 10 | #include <linux/videodev2.h> | ||
| 11 | #include <sys/ioctl.h> | ||
| 12 | #include <unistd.h> | ||
| 13 | #endif | ||
| 14 | |||
| 15 | namespace yodau::backend { | ||
| 16 | |||
| 17 | #ifdef __linux__ | ||
| 18 | namespace { | ||
| 19 | [[maybe_unused]] bool is_capture_device(const std::string& path) { | ||
| 20 | const int fd = ::open(path.c_str(), O_RDONLY | O_NONBLOCK); | ||
| 21 | if (fd < 0) { | ||
| 22 | return false; | ||
| 23 | } | ||
| 24 | |||
| 25 | v4l2_capability cap {}; | ||
| 26 | const int rc = ::ioctl(fd, VIDIOC_QUERYCAP, &cap); | ||
| 27 | ::close(fd); | ||
| 28 | |||
| 29 | if (rc < 0) { | ||
| 30 | return false; | ||
| 31 | } | ||
| 32 | |||
| 33 | std::uint32_t caps = cap.capabilities; | ||
| 34 | if (caps & V4L2_CAP_DEVICE_CAPS) { | ||
| 35 | caps = cap.device_caps; | ||
| 36 | } | ||
| 37 | |||
| 38 | const bool capture = (caps & V4L2_CAP_VIDEO_CAPTURE) | ||
| 39 | || (caps & V4L2_CAP_VIDEO_CAPTURE_MPLANE); | ||
| 40 | |||
| 41 | const bool streaming = (caps & V4L2_CAP_STREAMING); | ||
| 42 | |||
| 43 | return capture && streaming; | ||
| 44 | } | ||
| 45 | } | ||
| 46 | #endif | ||
| 47 | |||
| 48 | ✗ | int opencv_client::local_index_from_path(const std::string& path) const { | |
| 49 | ✗ | const std::string pref = "/dev/video"; | |
| 50 | ✗ | if (path.rfind(pref, 0) != 0) { | |
| 51 | return -1; | ||
| 52 | } | ||
| 53 | |||
| 54 | ✗ | const auto tail = path.substr(pref.size()); | |
| 55 | ✗ | int idx = -1; | |
| 56 | |||
| 57 | ✗ | const auto res | |
| 58 | ✗ | = std::from_chars(tail.data(), tail.data() + tail.size(), idx); | |
| 59 | ✗ | if (res.ec != std::errc() \|\| res.ptr != tail.data() + tail.size()) { | |
| 60 | return -1; | ||
| 61 | } | ||
| 62 | |||
| 63 | ✗ | return idx; | |
| 64 | ✗ | } | |
| 65 | |||
| 66 | ✗ | frame opencv_client::mat_to_frame(const cv::Mat& m) const { | |
| 67 | ✗ | frame f; | |
| 68 | ✗ | f.width = m.cols; | |
| 69 | ✗ | f.height = m.rows; | |
| 70 | ✗ | f.stride = static_cast<int>(m.step); | |
| 71 | ✗ | f.ts = std::chrono::steady_clock::now(); | |
| 72 | |||
| 73 | ✗ | if (m.channels() == 3 && m.type() == CV_8UC3) { | |
| 74 | ✗ | f.format = pixel_format::bgr24; | |
| 75 | ✗ | f.data.assign(m.data, m.data + m.total() * m.elemSize()); | |
| 76 | return f; | ||
| 77 | } | ||
| 78 | |||
| 79 | ✗ | cv::Mat bgr; | |
| 80 | ✗ | if (m.channels() == 1) { | |
| 81 | ✗ | cv::cvtColor(m, bgr, cv::COLOR_GRAY2BGR); | |
| 82 | ✗ | } else if (m.channels() == 4) { | |
| 83 | ✗ | cv::cvtColor(m, bgr, cv::COLOR_BGRA2BGR); | |
| 84 | } else { | ||
| 85 | ✗ | m.convertTo(bgr, CV_8UC3); | |
| 86 | } | ||
| 87 | |||
| 88 | ✗ | f.format = pixel_format::bgr24; | |
| 89 | ✗ | f.stride = static_cast<int>(bgr.step); | |
| 90 | ✗ | f.data.assign(bgr.data, bgr.data + bgr.total() * bgr.elemSize()); | |
| 91 | ✗ | return f; | |
| 92 | ✗ | } | |
| 93 | |||
| 94 | ✗ | void opencv_client::daemon_start( | |
| 95 | const stream& s, const std::function<void(frame&&)>& on_frame, | ||
| 96 | const std::stop_token& st | ||
| 97 | ) { | ||
| 98 | ✗ | const auto path = s.get_path(); | |
| 99 | ✗ | cv::VideoCapture cap; | |
| 100 | |||
| 101 | ✗ | const auto idx = local_index_from_path(path); | |
| 102 | ✗ | if (idx >= 0) { | |
| 103 | ✗ | cap.open(idx); | |
| 104 | } else { | ||
| 105 | ✗ | cap.open(path); | |
| 106 | } | ||
| 107 | |||
| 108 | ✗ | if (!cap.isOpened()) { | |
| 109 | ✗ | return; | |
| 110 | } | ||
| 111 | |||
| 112 | ✗ | cv::Mat m; | |
| 113 | ✗ | while (!st.stop_requested()) { | |
| 114 | ✗ | if (!cap.read(m) \|\| m.empty()) { | |
| 115 | ✗ | if (s.is_looping() && s.get_type() == stream_type::file) { | |
| 116 | ✗ | cap.set(cv::CAP_PROP_POS_FRAMES, 0); | |
| 117 | ✗ | continue; | |
| 118 | } | ||
| 119 | ✗ | break; | |
| 120 | } | ||
| 121 | |||
| 122 | ✗ | auto f = mat_to_frame(m); | |
| 123 | ✗ | on_frame(std::move(f)); | |
| 124 | ✗ | } | |
| 125 | ✗ | } | |
| 126 | |||
| 127 | ✗ | float opencv_client::cross_z( | |
| 128 | const point& a, const point& b, const point& c | ||
| 129 | ) const { | ||
| 130 | ✗ | const float abx = b.x - a.x; | |
| 131 | ✗ | const float aby = b.y - a.y; | |
| 132 | ✗ | const float acx = c.x - a.x; | |
| 133 | ✗ | const float acy = c.y - a.y; | |
| 134 | ✗ | return abx * acy - aby * acx; | |
| 135 | } | ||
| 136 | |||
| 137 | ✗ | int opencv_client::orient( | |
| 138 | const point& a, const point& b, const point& c | ||
| 139 | ) const { | ||
| 140 | ✗ | const float v = cross_z(a, b, c); | |
| 141 | ✗ | if (v > point::epsilon) { | |
| 142 | return 1; | ||
| 143 | } | ||
| 144 | ✗ | if (v < -point::epsilon) { | |
| 145 | ✗ | return -1; | |
| 146 | } | ||
| 147 | return 0; | ||
| 148 | } | ||
| 149 | |||
| 150 | ✗ | bool opencv_client::between(float a, float b, float c) const { | |
| 151 | ✗ | return (a <= c + point::epsilon && c <= b + point::epsilon) | |
| 152 | ✗ | \|\| (b <= c + point::epsilon && c <= a + point::epsilon); | |
| 153 | } | ||
| 154 | |||
| 155 | ✗ | bool opencv_client::on_segment( | |
| 156 | const point& a, const point& b, const point& c | ||
| 157 | ) const { | ||
| 158 | ✗ | return orient(a, b, c) == 0 && between(a.x, b.x, c.x) | |
| 159 | ✗ | && between(a.y, b.y, c.y); | |
| 160 | } | ||
| 161 | |||
| 162 | ✗ | bool opencv_client::segments_intersect( | |
| 163 | const point& p1, const point& p2, const point& q1, const point& q2 | ||
| 164 | ) const { | ||
| 165 | ✗ | const int o1 = orient(p1, p2, q1); | |
| 166 | ✗ | const int o2 = orient(p1, p2, q2); | |
| 167 | ✗ | const int o3 = orient(q1, q2, p1); | |
| 168 | ✗ | const int o4 = orient(q1, q2, p2); | |
| 169 | |||
| 170 | ✗ | if (o1 != o2 && o3 != o4) { | |
| 171 | return true; | ||
| 172 | } | ||
| 173 | |||
| 174 | ✗ | if (o1 == 0 && on_segment(p1, p2, q1)) { | |
| 175 | return true; | ||
| 176 | } | ||
| 177 | ✗ | if (o2 == 0 && on_segment(p1, p2, q2)) { | |
| 178 | return true; | ||
| 179 | } | ||
| 180 | ✗ | if (o3 == 0 && on_segment(q1, q2, p1)) { | |
| 181 | return true; | ||
| 182 | } | ||
| 183 | ✗ | if (o4 == 0 && on_segment(q1, q2, p2)) { | |
| 184 | ✗ | return true; | |
| 185 | } | ||
| 186 | |||
| 187 | return false; | ||
| 188 | } | ||
| 189 | |||
| 190 | ✗ | std::optional<point> opencv_client::segment_intersection( | |
| 191 | const point& p1, const point& p2, const point& q1, const point& q2 | ||
| 192 | ) const { | ||
| 193 | ✗ | const float rpx = p2.x - p1.x; | |
| 194 | ✗ | const float rpy = p2.y - p1.y; | |
| 195 | ✗ | const float spx = q2.x - q1.x; | |
| 196 | ✗ | const float spy = q2.y - q1.y; | |
| 197 | |||
| 198 | ✗ | const float den = rpx * spy - rpy * spx; | |
| 199 | ✗ | if (std::abs(den) <= point::epsilon) { | |
| 200 | ✗ | return {}; | |
| 201 | } | ||
| 202 | |||
| 203 | ✗ | const float qpx = q1.x - p1.x; | |
| 204 | ✗ | const float qpy = q1.y - p1.y; | |
| 205 | |||
| 206 | ✗ | const float t = (qpx * spy - qpy * spx) / den; | |
| 207 | ✗ | const float u = (qpx * rpy - qpy * rpx) / den; | |
| 208 | |||
| 209 | ✗ | if (t < -point::epsilon \|\| t > 1.0f + point::epsilon) { | |
| 210 | ✗ | return {}; | |
| 211 | } | ||
| 212 | ✗ | if (u < -point::epsilon \|\| u > 1.0f + point::epsilon) { | |
| 213 | ✗ | return {}; | |
| 214 | } | ||
| 215 | |||
| 216 | ✗ | point out; | |
| 217 | ✗ | out.x = p1.x + t * rpx; | |
| 218 | ✗ | out.y = p1.y + t * rpy; | |
| 219 | ✗ | return out; | |
| 220 | } | ||
| 221 | |||
| 222 | ✗ | void opencv_client::add_motion_event( | |
| 223 | std::vector<event>& out, const std::string& stream_name, | ||
| 224 | const std::chrono::steady_clock::time_point ts, const point& pos_pct | ||
| 225 | ) const { | ||
| 226 | ✗ | event e; | |
| 227 | ✗ | e.kind = event_kind::motion; | |
| 228 | ✗ | e.stream_name = stream_name; | |
| 229 | ✗ | e.ts = ts; | |
| 230 | ✗ | e.pos_pct = pos_pct; | |
| 231 | ✗ | out.push_back(std::move(e)); | |
| 232 | ✗ | } | |
| 233 | |||
| 234 | ✗ | void opencv_client::consider_hit( | |
| 235 | bool& hit, float& best_dist2, point& best_a, point& best_b, point& best_pos, | ||
| 236 | const point& cur_pos_pct, const point& a, const point& b, const point& pos | ||
| 237 | ) const { | ||
| 238 | ✗ | const float dx = pos.x - cur_pos_pct.x; | |
| 239 | ✗ | const float dy = pos.y - cur_pos_pct.y; | |
| 240 | ✗ | const float d2 = dx * dx + dy * dy; | |
| 241 | |||
| 242 | ✗ | if (d2 < best_dist2) { | |
| 243 | ✗ | best_dist2 = d2; | |
| 244 | ✗ | best_a = a; | |
| 245 | ✗ | best_b = b; | |
| 246 | ✗ | best_pos = pos; | |
| 247 | ✗ | hit = true; | |
| 248 | } | ||
| 249 | ✗ | } | |
| 250 | |||
| 251 | ✗ | void opencv_client::test_line_segment_against_contour( | |
| 252 | bool& hit, float& best_dist2, point& best_a, point& best_b, point& best_pos, | ||
| 253 | const point& cur_pos_pct, const std::vector<point>& contour_pct, | ||
| 254 | const point& a, const point& b | ||
| 255 | ) const { | ||
| 256 | ✗ | if (contour_pct.size() < 2) { | |
| 257 | return; | ||
| 258 | } | ||
| 259 | |||
| 260 | ✗ | for (size_t j = 1; j < contour_pct.size(); ++j) { | |
| 261 | ✗ | const auto& c1 = contour_pct[j - 1]; | |
| 262 | ✗ | const auto& c2 = contour_pct[j]; | |
| 263 | |||
| 264 | ✗ | if (segments_intersect(a, b, c1, c2)) { | |
| 265 | ✗ | point ip = cur_pos_pct; | |
| 266 | ✗ | const auto inter = segment_intersection(a, b, c1, c2); | |
| 267 | ✗ | if (inter.has_value()) { | |
| 268 | ✗ | ip = *inter; | |
| 269 | } | ||
| 270 | |||
| 271 | ✗ | consider_hit( | |
| 272 | hit, best_dist2, best_a, best_b, best_pos, cur_pos_pct, a, b, ip | ||
| 273 | ); | ||
| 274 | } | ||
| 275 | } | ||
| 276 | |||
| 277 | ✗ | const auto& c_last = contour_pct.back(); | |
| 278 | ✗ | const auto& c_first = contour_pct.front(); | |
| 279 | ✗ | if (segments_intersect(a, b, c_last, c_first)) { | |
| 280 | ✗ | point ip = cur_pos_pct; | |
| 281 | ✗ | const auto inter = segment_intersection(a, b, c_last, c_first); | |
| 282 | ✗ | if (inter.has_value()) { | |
| 283 | ✗ | ip = *inter; | |
| 284 | } | ||
| 285 | |||
| 286 | ✗ | consider_hit( | |
| 287 | hit, best_dist2, best_a, best_b, best_pos, cur_pos_pct, a, b, ip | ||
| 288 | ); | ||
| 289 | } | ||
| 290 | } | ||
| 291 | |||
| 292 | ✗ | void opencv_client::process_tripwire_for_line( | |
| 293 | std::vector<event>& out, const stream& s, const line& l, | ||
| 294 | const point& prev_pos, const point& cur_pos_pct, | ||
| 295 | const std::vector<point>& contour_pct, | ||
| 296 | const std::chrono::steady_clock::time_point now | ||
| 297 | ) { | ||
| 298 | ✗ | const auto& pts = l.points; | |
| 299 | ✗ | if (pts.size() < 2) { | |
| 300 | ✗ | return; | |
| 301 | } | ||
| 302 | |||
| 303 | ✗ | bool hit = false; | |
| 304 | ✗ | point best_a {}; | |
| 305 | ✗ | point best_b {}; | |
| 306 | ✗ | point best_pos = cur_pos_pct; | |
| 307 | ✗ | float best_dist2 = std::numeric_limits<float>::max(); | |
| 308 | |||
| 309 | ✗ | for (size_t i = 1; i < pts.size(); ++i) { | |
| 310 | ✗ | test_line_segment_against_contour( | |
| 311 | hit, best_dist2, best_a, best_b, best_pos, cur_pos_pct, contour_pct, | ||
| 312 | ✗ | pts[i - 1], pts[i] | |
| 313 | ); | ||
| 314 | } | ||
| 315 | |||
| 316 | ✗ | if (l.closed && pts.size() > 2) { | |
| 317 | ✗ | test_line_segment_against_contour( | |
| 318 | hit, best_dist2, best_a, best_b, best_pos, cur_pos_pct, contour_pct, | ||
| 319 | ✗ | pts.back(), pts.front() | |
| 320 | ); | ||
| 321 | } | ||
| 322 | |||
| 323 | ✗ | if (!hit) { | |
| 324 | return; | ||
| 325 | } | ||
| 326 | |||
| 327 | ✗ | const float prev_side = cross_z(best_a, best_b, prev_pos); | |
| 328 | ✗ | const float cur_side = cross_z(best_a, best_b, cur_pos_pct); | |
| 329 | |||
| 330 | ✗ | std::string dir = "flat"; | |
| 331 | ✗ | if (prev_side <= 0.0f && cur_side > 0.0f) { | |
| 332 | ✗ | dir = "neg_to_pos"; | |
| 333 | ✗ | } else if (prev_side >= 0.0f && cur_side < 0.0f) { | |
| 334 | ✗ | dir = "pos_to_neg"; | |
| 335 | } | ||
| 336 | |||
| 337 | ✗ | if (l.dir == tripwire_dir::neg_to_pos) { | |
| 338 | ✗ | if (dir != "neg_to_pos") { | |
| 339 | return; | ||
| 340 | } | ||
| 341 | ✗ | } else if (l.dir == tripwire_dir::pos_to_neg) { | |
| 342 | ✗ | if (dir != "pos_to_neg") { | |
| 343 | return; | ||
| 344 | } | ||
| 345 | } | ||
| 346 | |||
| 347 | ✗ | const int tripwire_cooldown_ms = 1200; | |
| 348 | ✗ | const std::string key = s.get_name() + "|" + l.name + "|" + dir; | |
| 349 | |||
| 350 | ✗ | bool allow_tripwire = true; | |
| 351 | ✗ | { | |
| 352 | ✗ | std::scoped_lock lock(mtx); | |
| 353 | ✗ | auto it = last_tripwire_by_key.find(key); | |
| 354 | ✗ | if (it != last_tripwire_by_key.end()) { | |
| 355 | ✗ | const auto dt | |
| 356 | ✗ | = std::chrono::duration_cast<std::chrono::milliseconds>( | |
| 357 | ✗ | now - it->second | |
| 358 | ) | ||
| 359 | ✗ | .count(); | |
| 360 | ✗ | if (dt < tripwire_cooldown_ms) { | |
| 361 | allow_tripwire = false; | ||
| 362 | } | ||
| 363 | } | ||
| 364 | |||
| 365 | if (allow_tripwire) { | ||
| 366 | ✗ | last_tripwire_by_key[key] = now; | |
| 367 | } | ||
| 368 | ✗ | } | |
| 369 | |||
| 370 | ✗ | if (!allow_tripwire) { | |
| 371 | ✗ | return; | |
| 372 | } | ||
| 373 | |||
| 374 | ✗ | event t; | |
| 375 | ✗ | t.kind = event_kind::tripwire; | |
| 376 | ✗ | t.stream_name = s.get_name(); | |
| 377 | ✗ | t.line_name = l.name; | |
| 378 | ✗ | t.ts = now; | |
| 379 | ✗ | t.pos_pct = best_pos; | |
| 380 | ✗ | t.message = dir; | |
| 381 | |||
| 382 | ✗ | std::cerr << "tripwire stream=" << t.stream_name << " line=" << t.line_name | |
| 383 | ✗ | << " dir=" << dir << std::endl; | |
| 384 | |||
| 385 | ✗ | out.push_back(std::move(t)); | |
| 386 | ✗ | } | |
| 387 | |||
| 388 | ✗ | std::optional<size_t> opencv_client::find_largest_contour_index( | |
| 389 | const std::vector<std::vector<cv::Point>>& contours | ||
| 390 | ) const { | ||
| 391 | ✗ | if (contours.empty()) { | |
| 392 | ✗ | return {}; | |
| 393 | } | ||
| 394 | |||
| 395 | double max_area = 0.0; | ||
| 396 | size_t max_i = 0; | ||
| 397 | |||
| 398 | ✗ | for (size_t i = 0; i < contours.size(); ++i) { | |
| 399 | ✗ | const double area = cv::contourArea(contours[i]); | |
| 400 | ✗ | if (area > max_area) { | |
| 401 | ✗ | max_area = area; | |
| 402 | ✗ | max_i = i; | |
| 403 | } | ||
| 404 | } | ||
| 405 | |||
| 406 | ✗ | return max_i; | |
| 407 | } | ||
| 408 | |||
| 409 | std::vector<event> | ||
| 410 | ✗ | opencv_client::motion_processor(const stream& s, const frame& f) { | |
| 411 | ✗ | std::vector<event> out; | |
| 412 | |||
| 413 | ✗ | if (f.data.empty() \|\| f.width <= 0 \|\| f.height <= 0) { | |
| 414 | return out; | ||
| 415 | } | ||
| 416 | |||
| 417 | ✗ | cv::Mat bgr( | |
| 418 | ✗ | f.height, f.width, CV_8UC3, const_cast<std::uint8_t*>(f.data.data()), | |
| 419 | ✗ | static_cast<size_t>(f.stride) | |
| 420 | ✗ | ); | |
| 421 | |||
| 422 | ✗ | cv::Mat gray; | |
| 423 | ✗ | cv::cvtColor(bgr, gray, cv::COLOR_BGR2GRAY); | |
| 424 | ✗ | cv::GaussianBlur(gray, gray, cv::Size(5, 5), 0.0); | |
| 425 | |||
| 426 | ✗ | cv::Mat prev_gray; | |
| 427 | ✗ | { | |
| 428 | ✗ | std::scoped_lock lock(mtx); | |
| 429 | ✗ | auto it = prev_gray_by_stream.find(s.get_name()); | |
| 430 | ✗ | if (it == prev_gray_by_stream.end()) { | |
| 431 | ✗ | prev_gray_by_stream.emplace(s.get_name(), gray.clone()); | |
| 432 | ✗ | return out; | |
| 433 | } | ||
| 434 | ✗ | prev_gray = it->second; | |
| 435 | ✗ | it->second = gray.clone(); | |
| 436 | ✗ | } | |
| 437 | |||
| 438 | ✗ | cv::Mat diff; | |
| 439 | ✗ | cv::absdiff(prev_gray, gray, diff); | |
| 440 | ✗ | cv::threshold(diff, diff, 25, 255, cv::THRESH_BINARY); | |
| 441 | |||
| 442 | ✗ | cv::erode(diff, diff, cv::Mat(), cv::Point(-1, -1), 1); | |
| 443 | ✗ | cv::dilate(diff, diff, cv::Mat(), cv::Point(-1, -1), 2); | |
| 444 | |||
| 445 | ✗ | std::vector<std::vector<cv::Point>> contours; | |
| 446 | ✗ | cv::findContours( | |
| 447 | diff, contours, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE | ||
| 448 | ); | ||
| 449 | |||
| 450 | ✗ | if (contours.empty()) { | |
| 451 | return out; | ||
| 452 | } | ||
| 453 | |||
| 454 | ✗ | const auto max_i_opt = find_largest_contour_index(contours); | |
| 455 | ✗ | if (!max_i_opt.has_value()) { | |
| 456 | return out; | ||
| 457 | } | ||
| 458 | |||
| 459 | ✗ | const size_t max_i = *max_i_opt; | |
| 460 | ✗ | const double max_area = cv::contourArea(contours[max_i]); | |
| 461 | |||
| 462 | ✗ | const double min_area = 0.001 * static_cast<double>(diff.rows * diff.cols); | |
| 463 | ✗ | if (max_area < min_area) { | |
| 464 | return out; | ||
| 465 | } | ||
| 466 | |||
| 467 | ✗ | std::vector<cv::Point> approx; | |
| 468 | ✗ | { | |
| 469 | ✗ | const double eps = 3.0; | |
| 470 | ✗ | cv::approxPolyDP(contours[max_i], approx, eps, true); | |
| 471 | } | ||
| 472 | |||
| 473 | ✗ | std::vector<point> contour_pct; | |
| 474 | ✗ | contour_pct.reserve(approx.size()); | |
| 475 | |||
| 476 | ✗ | for (const auto& pt : approx) { | |
| 477 | ✗ | point p; | |
| 478 | ✗ | p.x = static_cast<float>(pt.x) * 100.0f / static_cast<float>(f.width); | |
| 479 | ✗ | p.y = static_cast<float>(pt.y) * 100.0f / static_cast<float>(f.height); | |
| 480 | ✗ | contour_pct.push_back(p); | |
| 481 | } | ||
| 482 | |||
| 483 | ✗ | struct bbox2f { | |
| 484 | float min_x; | ||
| 485 | float min_y; | ||
| 486 | float max_x; | ||
| 487 | float max_y; | ||
| 488 | }; | ||
| 489 | |||
| 490 | ✗ | bbox2f motion_box {}; | |
| 491 | ✗ | bool motion_box_ok = false; | |
| 492 | |||
| 493 | ✗ | if (!contour_pct.empty()) { | |
| 494 | ✗ | motion_box.min_x = 100.0f; | |
| 495 | ✗ | motion_box.min_y = 100.0f; | |
| 496 | ✗ | motion_box.max_x = 0.0f; | |
| 497 | ✗ | motion_box.max_y = 0.0f; | |
| 498 | |||
| 499 | ✗ | for (const auto& p : contour_pct) { | |
| 500 | ✗ | if (p.x < motion_box.min_x) { | |
| 501 | ✗ | motion_box.min_x = p.x; | |
| 502 | } | ||
| 503 | ✗ | if (p.y < motion_box.min_y) { | |
| 504 | ✗ | motion_box.min_y = p.y; | |
| 505 | } | ||
| 506 | ✗ | if (p.x > motion_box.max_x) { | |
| 507 | ✗ | motion_box.max_x = p.x; | |
| 508 | } | ||
| 509 | ✗ | if (p.y > motion_box.max_y) { | |
| 510 | ✗ | motion_box.max_y = p.y; | |
| 511 | } | ||
| 512 | } | ||
| 513 | |||
| 514 | ✗ | motion_box_ok = true; | |
| 515 | } | ||
| 516 | |||
| 517 | ✗ | const int nz = cv::countNonZero(diff); | |
| 518 | ✗ | const int total = diff.rows * diff.cols; | |
| 519 | ✗ | const double ratio = total > 0 ? static_cast<double>(nz) / total : 0.0; | |
| 520 | |||
| 521 | ✗ | if (ratio < 0.01) { | |
| 522 | return out; | ||
| 523 | } | ||
| 524 | |||
| 525 | ✗ | const double min_ratio = 0.02; | |
| 526 | ✗ | const int cooldown_ms = 150; | |
| 527 | |||
| 528 | ✗ | if (ratio < min_ratio) { | |
| 529 | return out; | ||
| 530 | } | ||
| 531 | |||
| 532 | ✗ | const auto now = std::chrono::steady_clock::now(); | |
| 533 | ✗ | { | |
| 534 | ✗ | std::scoped_lock lock(mtx); | |
| 535 | ✗ | auto it = last_emit_by_stream.find(s.get_name()); | |
| 536 | ✗ | if (it != last_emit_by_stream.end()) { | |
| 537 | ✗ | const auto dt | |
| 538 | ✗ | = std::chrono::duration_cast<std::chrono::milliseconds>( | |
| 539 | ✗ | now - it->second | |
| 540 | ) | ||
| 541 | ✗ | .count(); | |
| 542 | ✗ | if (dt < cooldown_ms) { | |
| 543 | ✗ | return out; | |
| 544 | } | ||
| 545 | } | ||
| 546 | ✗ | last_emit_by_stream[s.get_name()] = now; | |
| 547 | ✗ | } | |
| 548 | |||
| 549 | ✗ | cv::Moments mm = cv::moments(contours[max_i]); | |
| 550 | ✗ | double cx = 0.0; | |
| 551 | ✗ | double cy = 0.0; | |
| 552 | ✗ | if (mm.m00 > 0.0) { | |
| 553 | ✗ | cx = mm.m10 / mm.m00; | |
| 554 | ✗ | cy = mm.m01 / mm.m00; | |
| 555 | } else { | ||
| 556 | ✗ | cx = static_cast<double>(f.width) * 0.5; | |
| 557 | ✗ | cy = static_cast<double>(f.height) * 0.5; | |
| 558 | } | ||
| 559 | |||
| 560 | ✗ | const point cur_pos_pct { static_cast<float>(cx * 100.0 / f.width), | |
| 561 | ✗ | static_cast<float>(cy * 100.0 / f.height) }; | |
| 562 | |||
| 563 | ✗ | point prev_pos {}; | |
| 564 | ✗ | bool has_prev = false; | |
| 565 | ✗ | { | |
| 566 | ✗ | std::scoped_lock lock(mtx); | |
| 567 | ✗ | auto it = last_pos_by_stream.find(s.get_name()); | |
| 568 | ✗ | if (it != last_pos_by_stream.end()) { | |
| 569 | ✗ | prev_pos = it->second; | |
| 570 | ✗ | has_prev = true; | |
| 571 | } | ||
| 572 | ✗ | last_pos_by_stream[s.get_name()] = cur_pos_pct; | |
| 573 | ✗ | } | |
| 574 | |||
| 575 | ✗ | if (has_prev) { | |
| 576 | ✗ | const auto lines = s.lines_snapshot(); | |
| 577 | ✗ | for (const auto& lp : lines) { | |
| 578 | ✗ | if (!lp) { | |
| 579 | ✗ | continue; | |
| 580 | } | ||
| 581 | |||
| 582 | ✗ | const auto& pts = lp->points; | |
| 583 | ✗ | if (pts.empty()) { | |
| 584 | ✗ | continue; | |
| 585 | } | ||
| 586 | |||
| 587 | ✗ | if (motion_box_ok) { | |
| 588 | ✗ | bbox2f line_box {}; | |
| 589 | ✗ | line_box.min_x = 100.0f; | |
| 590 | ✗ | line_box.min_y = 100.0f; | |
| 591 | ✗ | line_box.max_x = 0.0f; | |
| 592 | ✗ | line_box.max_y = 0.0f; | |
| 593 | |||
| 594 | ✗ | for (const auto& p : pts) { | |
| 595 | ✗ | if (p.x < line_box.min_x) { | |
| 596 | ✗ | line_box.min_x = p.x; | |
| 597 | } | ||
| 598 | ✗ | if (p.y < line_box.min_y) { | |
| 599 | ✗ | line_box.min_y = p.y; | |
| 600 | } | ||
| 601 | ✗ | if (p.x > line_box.max_x) { | |
| 602 | ✗ | line_box.max_x = p.x; | |
| 603 | } | ||
| 604 | ✗ | if (p.y > line_box.max_y) { | |
| 605 | ✗ | line_box.max_y = p.y; | |
| 606 | } | ||
| 607 | } | ||
| 608 | |||
| 609 | ✗ | const bool x_overlap | |
| 610 | ✗ | = !(line_box.max_x < motion_box.min_x | |
| 611 | ✗ | \|\| line_box.min_x > motion_box.max_x); | |
| 612 | |||
| 613 | ✗ | const bool y_overlap | |
| 614 | ✗ | = !(line_box.max_y < motion_box.min_y | |
| 615 | ✗ | \|\| line_box.min_y > motion_box.max_y); | |
| 616 | |||
| 617 | ✗ | if (!(x_overlap && y_overlap)) { | |
| 618 | ✗ | continue; | |
| 619 | } | ||
| 620 | } | ||
| 621 | |||
| 622 | ✗ | process_tripwire_for_line( | |
| 623 | ✗ | out, s, *lp, prev_pos, cur_pos_pct, contour_pct, now | |
| 624 | ); | ||
| 625 | } | ||
| 626 | ✗ | } | |
| 627 | |||
| 628 | ✗ | add_motion_event(out, s.get_name(), now, cur_pos_pct); | |
| 629 | |||
| 630 | ✗ | const int grid_step = 24; | |
| 631 | ✗ | const int max_bubbles = 80; | |
| 632 | |||
| 633 | ✗ | int bubbled = 0; | |
| 634 | ✗ | for (int y = 0; y < diff.rows; y += grid_step) { | |
| 635 | ✗ | const std::uint8_t* row = diff.ptr<std::uint8_t>(y); | |
| 636 | ✗ | for (int x = 0; x < diff.cols; x += grid_step) { | |
| 637 | ✗ | if (row[x] == 0) { | |
| 638 | ✗ | continue; | |
| 639 | } | ||
| 640 | |||
| 641 | ✗ | point p; | |
| 642 | ✗ | p.x = static_cast<float>(x) * 100.0f / static_cast<float>(f.width); | |
| 643 | ✗ | p.y = static_cast<float>(y) * 100.0f / static_cast<float>(f.height); | |
| 644 | |||
| 645 | ✗ | add_motion_event(out, s.get_name(), now, p); | |
| 646 | ✗ | bubbled++; | |
| 647 | |||
| 648 | ✗ | if (bubbled >= max_bubbles) { | |
| 649 | break; | ||
| 650 | } | ||
| 651 | } | ||
| 652 | |||
| 653 | ✗ | if (bubbled >= max_bubbles) { | |
| 654 | break; | ||
| 655 | } | ||
| 656 | } | ||
| 657 | |||
| 658 | return out; | ||
| 659 | ✗ | } | |
| 660 | |||
| 661 | ✗ | stream_manager::daemon_start_fn opencv_client::daemon_start_fn() { | |
| 662 | ✗ | return [this]( | |
| 663 | const stream& s, std::function<void(frame&&)> on_frame, | ||
| 664 | std::stop_token st | ||
| 665 | ✗ | ) { daemon_start(s, on_frame, st); }; | |
| 666 | } | ||
| 667 | |||
| 668 | ✗ | stream_manager::frame_processor_fn opencv_client::frame_processor_fn() { | |
| 669 | ✗ | return [this](const stream& s, const frame& f) { | |
| 670 | ✗ | return motion_processor(s, f); | |
| 671 | ✗ | }; | |
| 672 | } | ||
| 673 | |||
| 674 | namespace { | ||
| 675 | ✗ | opencv_client& global_opencv_client() { | |
| 676 | ✗ | static opencv_client inst; | |
| 677 | ✗ | return inst; | |
| 678 | } | ||
| 679 | } | ||
| 680 | |||
| 681 | ✗ | void opencv_daemon_start( | |
| 682 | const stream& s, const std::function<void(frame&&)>& on_frame, | ||
| 683 | const std::stop_token& st | ||
| 684 | ) { | ||
| 685 | ✗ | global_opencv_client().daemon_start(s, on_frame, st); | |
| 686 | ✗ | } | |
| 687 | |||
| 688 | ✗ | std::vector<event> opencv_motion_processor(const stream& s, const frame& f) { | |
| 689 | ✗ | return global_opencv_client().motion_processor(s, f); | |
| 690 | } | ||
| 691 | |||
| 692 | } | ||
| 693 | |||
| 694 | #endif | ||
| 695 | |||