This commit is contained in:
2026-01-09 13:59:10 +08:00
commit 336a19762a
378 changed files with 99177 additions and 0 deletions

View File

@@ -0,0 +1,973 @@
/*
本程序用于视频分流
1.推流摄像头画面,使用UDP原生协议进行推流,交由YOLO模型进行处理
2.接收YOLO传来的坐标和深度数据
3.根据获取到的数据绘制边框和相应数据
4.根据距离信息进行报警和图片视频保存
5.输出处理完毕的视频帧
*/
#include <iostream>
#include <opencv4/opencv2/opencv.hpp>
#include <mqtt/async_client.h>
#include <nlohmann/json.hpp>
#include <mutex>
#include <vector>
#include <queue> //队列
#include <condition_variable>
#include <atomic>
#include <deque> //双端队列
#include <boost/process.hpp>
#include "Netra.hpp"
#include <X11/Xlib.h>
#include <optional>
using namespace std;
using namespace QCL;
using namespace cv;
using namespace chrono_literals;
namespace bp = boost::process;
// Paths and endpoints.
const string mqtt_url = "tcp://127.0.0.1:1883";                    // local MQTT broker
const string clientId = "video_subData";                           // MQTT client id
const string Topic = "/video/PersonData";                          // topic carrying YOLO detection results
const string filePath = "/home/orangepi/RKApp/InitAuth/conf/.env"; // config file (zones, thresholds, mirror/flip)
const string warningPath = "/mnt/save/warning/";                   // alarm image output directory
const string videoPath = "/mnt/save/video/";                       // alarm video output directory
// One detection result received from the YOLO process.
struct Dection
{
    double x, y, w, h; // box origin/size; either normalized (0~1) or pixel values
    double distance;   // depth reported by YOLO; 0 means "no distance available"
};
// Alarm distance thresholds loaded from the .env file.
struct dangerDistance
{
    int danger; // NEAR_THRESHOLD: at or below -> red box / alarm
    int warn;   // MID_THRESHOLD: at or below -> yellow box
    int safe;   // MAX_DISTANCE
} dis;          // global instance refreshed by GetDistance()
// A 2-D point stored in normalized coordinates (fractions of frame size).
struct Point2N
{
    double x, y;
};
// A named quadrilateral zone; vertices are stored normalized (0..1).
struct ZoneBox
{
    string name;
    array<Point2N, 4> vertices;
};
// Zone polygons (safe / warning / danger), loaded from the .env file.
ZoneBox g_safe, g_warn, g_dang;
// Global objects shared across threads.
VideoCapture cap;                 // opened with V4L2 in videoInit
Mat handleFrame;                  // frame after annotation (written by processFrame)
const int Qos = 0;
mqtt::async_client client(mqtt_url, clientId);
mutex detMutex;                   // guards latestDection
vector<Dection> latestDection;    // newest detections, consumed by processFrame for drawing
mutex alertMutex;                 // guards alertQueue
condition_variable alertcv;       // wakes the alarm thread (legacy queue path)
queue<nlohmann::json> alertQueue; // parsed messages (appears unused in the visible code — TODO confirm)
std::atomic<bool> alertWorkerRunning{false}; // alarm worker run flag
atomic<bool> outPutMode = false;  // alarm output polarity; false = low level
bool mainRunning = true;          // main-loop run flag (cleared by the SIGINT handler)
// Video buffering parameters.
const int FPS = 30;                                   // assumed capture/output frame rate
const int PRE_RECORD_SECONDS = 10;                    // pre-record length for alarm clips
const int MAX_BUFFER_SIZE = FPS * PRE_RECORD_SECONDS; // max frames kept in the ring buffer
mutex bufferMutex;                // guards videoDeque
deque<Mat> videoDeque;            // ring buffer holding the last ~10 s of frames
atomic<bool> isRecording{false};  // recording-in-progress flag (not referenced in the visible code)
atomic<bool> mediaMirror{false};  // horizontal mirror enabled
atomic<bool> mediaFlip{false};    // vertical flip enabled
// "Latest detections" handoff for the alarm thread (old frames dropped for low latency).
static std::mutex latestAlertMutex;
static std::condition_variable latestAlertCv;
static std::optional<std::vector<Dection>> latestAlertDets; // only the newest result is kept
static std::atomic<uint64_t> latestAlertSeq{0};             // bumped on every update
// Thresholds/output mode cached in memory to avoid re-reading the file every frame.
struct RuntimeConfig
{
    std::atomic<int> danger{0};
    std::atomic<int> warn{0};
    std::atomic<int> safe{0};
    std::atomic<bool> outPutMode{false};
};
static RuntimeConfig g_cfg;
// Initialize the MQTT client, subscribe, and start the alarm thread.
void MqttInit();
// Open /dev/video10 via V4L2 and configure capture properties.
bool videoInit(VideoCapture &cap);
// Spawn the FFmpeg push-stream process and return its stdin pipe.
FILE *pipeInit();
// Capture, annotate, and push a single frame.
bool processFrame(VideoCapture &cap, FILE *pipe, Mat &frame, int64 &count, chrono::steady_clock::time_point &t0);
// Release the pipe, camera, and windows.
void cleanup(FILE *pipe, VideoCapture &cap);
// Main capture/annotate/display loop.
void mainLoop(VideoCapture &cap, FILE *pipe);
// MQTT message callback: parse detections and hand them to the workers.
void getMsgCallback(mqtt::const_message_ptr msg);
// Draw one detection rectangle with its depth label onto handleFrame.
void drawRect(double x, double y, double w, double h, double distance);
// Start the detached alarm worker thread.
void warnThread();
// Read alarm distances and output mode from the .env file into `dis`/`outPutMode`.
bool GetDistance();
// Invoke the external GPIO program to drive the alarm output level.
void setGPIOLevel(int level);
// Current local time formatted for use in file names.
string getCurrentTimeStr();
// Save an alarm snapshot image.
void saveAlarmImage(const cv::Mat &frame);
// Save the buffered pre-record frames as an alarm video (detached thread).
void saveAlarmVideo(std::deque<cv::Mat> bufferSnapshot);
// Load the SAFE/WARN/DANG zone quadrilaterals (normalized values) from the .env file.
bool LoadZonesFromEnv();
// Draw the zone polygons, scaling normalized coordinates to the frame size.
void drawZones(Mat &img);
// Test whether a detection box's bottom edge touches the danger polygon (pixel space).
bool bottomTouchesDanger(const Dection &d, const ZoneBox &dangerBox);
// Read mirror/flip settings from the .env file.
void loadMirrerSet();
// Apply mirror/flip to a frame according to the current settings.
void SetMirror(Mat &frame);
// Reload zones and mirror settings only when the .env file has changed.
void ReloadConfigIfChanged();
// SIGINT handler: request shutdown.
void Exit(int sig);
// Entry point: wire up camera, FFmpeg, MQTT, then run the processing loop.
int main()
{
    // Give the local MQTT/RTSP servers time to start before connecting.
    this_thread::sleep_for(5s);
    // Open and configure the camera.
    if (!videoInit(cap))
    {
        return -1;
    }
    // Start the FFmpeg push-stream pipe.
    FILE *pipe = pipeInit();
    if (!pipe)
    {
        return -1;
    }
    // Load the zones once up front so the first frames already have boxes.
    LoadZonesFromEnv();
    signal(SIGINT, Exit);
    // Connect MQTT, subscribe, and start the alarm thread.
    MqttInit();
    // Run until 'q' or SIGINT.
    mainLoop(cap, pipe);
    // Release everything.
    cleanup(pipe, cap);
    return 0;
}
// Reload zones and mirror settings only when the .env file's mtime advanced.
// The very first call always loads once so the process starts configured.
void ReloadConfigIfChanged()
{
    static std::filesystem::file_time_type lastSeen{};
    static bool initialized = false;
    std::error_code ec;
    const auto stamp = std::filesystem::last_write_time(filePath, ec);
    if (ec)
    {
        // Cannot stat the file right now; skip quietly and retry next tick.
        return;
    }
    const bool firstCall = !initialized;
    const bool modified = initialized && stamp > lastSeen;
    if (!firstCall && !modified)
        return;
    // Either first run or the file changed: remember the stamp and reload.
    initialized = true;
    lastSeen = stamp;
    LoadZonesFromEnv();
    loadMirrerSet();
}
// Apply the configured mirror/flip transforms to a frame in place.
void SetMirror(Mat &frame)
{
    const bool doMirror = mediaMirror.load();
    const bool doFlip = mediaFlip.load();
    if (!doMirror && !doFlip)
        return; // nothing requested
    // cv::flip codes: -1 = both axes, 1 = horizontal, 0 = vertical.
    int flipCode;
    if (doMirror && doFlip)
        flipCode = -1;
    else if (doMirror)
        flipCode = 1;
    else
        flipCode = 0;
    cv::flip(frame, frame, flipCode);
}
// 读取配置,获取翻转镜像设置
void loadMirrerSet()
{
ReadFile rf(filePath);
if (!rf.Open())
{
cerr << "文件打开失败" << endl;
return;
}
else
{ // 解析文件
auto lines = rf.ReadLines();
rf.Close();
auto getBool([&](const string &key, bool &out)
{
out = false;
for(auto&line:lines)
{
if(line.rfind(key+"=",0)==0)
{
// 兼容大小写与可能的空格
auto val = line.substr(key.size() + 1);
for (auto &c : val) c = ::tolower(c);
val.erase(remove_if(val.begin(), val.end(), ::isspace), val.end());
if(val=="true")
{
out = true;
}
return out;
}
}
return out; });
bool mirror = false, flip = false;
getBool("MEDIA_MIRROR", mirror);
getBool("MEDIA_FLIP", flip);
mediaMirror.store(mirror);
mediaFlip.store(flip);
}
}
// SIGINT handler: ask the main loop and alarm worker to stop.
// NOTE(review): std::cout and condition_variable::notify_all are not
// async-signal-safe; this works in practice, but a flag-only handler would
// be strictly correct — consider refactoring.
void Exit(int sig)
{
    cout << "Exiting....." << endl;
    // Stop the main loop and the alarm worker.
    mainRunning = false;
    alertWorkerRunning = false;
    // Wake the alarm thread out of wait / wait_for.
    latestAlertCv.notify_all();
    alertcv.notify_all();
}
// 检测框底边是否接触 danger 多边形(用缩放后的像素点)
bool bottomTouchesDanger(const Dection &d, const ZoneBox &dangerBox)
{
vector<Point> poly;
poly.reserve(4);
for (auto &p : dangerBox.vertices)
{
poly.emplace_back(static_cast<int>(p.x * handleFrame.cols),
static_cast<int>(p.y * handleFrame.rows));
}
auto toPixX = [&](double v) -> int
{
return (v <= 1.0) ? static_cast<int>(v * handleFrame.cols) : static_cast<int>(v);
};
auto toPixY = [&](double v) -> int
{
return (v <= 1.0) ? static_cast<int>(v * handleFrame.rows) : static_cast<int>(v);
};
int x0 = toPixX(d.x);
int y0 = toPixY(d.y);
int wpx = toPixX(d.w);
int hpx = toPixY(d.h);
int x1 = x0 + wpx;
int yb = y0 + hpx;
int samples = max(5, wpx / 20);
for (int i = 0; i <= samples; ++i)
{
int x = x0 + (i * (x1 - x0)) / samples;
Point pt(x, yb);
double res = pointPolygonTest(poly, pt, false);
if (res >= 0)
return true;
}
return false;
}
// Load the SAFE/WARN/DANG zone quadrilaterals from the .env file.  Keys are
// "<PREFIX>_<1..4>_X" / "<PREFIX>_<1..4>_Y" holding normalized doubles.
// Missing or unparsable keys leave the vertex at 0.0.
bool LoadZonesFromEnv()
{
    ReadFile rf(filePath);
    if (!rf.Open())
    {
        cerr << "文件打开失败: " << filePath << endl;
        return false;
    }
    const auto lines = rf.ReadLines();
    rf.Close();
    // Find "key=<double>" and parse it; returns false if absent/invalid.
    auto readDouble = [&lines](const string &key, double &out) -> bool
    {
        const string prefix = key + "=";
        for (const auto &line : lines)
        {
            if (line.rfind(prefix, 0) != 0)
                continue;
            try
            {
                out = stod(line.substr(prefix.size()));
                return true;
            }
            catch (...)
            {
                return false;
            }
        }
        return false;
    };
    // Fill one zone's four vertices from its key prefix.
    auto fillBox = [&](ZoneBox &box, const string &name)
    {
        box.name = name;
        for (int idx = 0; idx < 4; ++idx)
        {
            double nx = 0.0, ny = 0.0;
            readDouble(QCL::format("{}_{}_X", name, idx + 1), nx);
            readDouble(QCL::format("{}_{}_Y", name, idx + 1), ny);
            box.vertices[idx] = {nx, ny}; // normalized values
        }
    };
    fillBox(g_safe, "SAFE");
    fillBox(g_warn, "WARN");
    fillBox(g_dang, "DANG");
    return true;
}
// Draw the three zone polygons, scaling normalized vertices to the image size.
void drawZones(Mat &img)
{
    const int W = img.cols;
    const int H = img.rows;
    // Zone/color table: safe=green, warn=yellow, danger=red (BGR).
    const struct
    {
        const ZoneBox *box;
        Scalar color;
    } layers[] = {
        {&g_safe, Scalar(0, 255, 0)},
        {&g_warn, Scalar(0, 255, 255)},
        {&g_dang, Scalar(0, 0, 255)},
    };
    for (const auto &layer : layers)
    {
        vector<Point> pts;
        pts.reserve(4);
        for (const auto &v : layer.box->vertices)
            pts.emplace_back(static_cast<int>(v.x * W), static_cast<int>(v.y * H));
        polylines(img, pts, true, layer.color, 2);
    }
}
// 保存图片
void saveAlarmImage(const Mat &frame)
{
if (frame.empty())
{
cerr << "报警图片保存跳过: 帧为空" << endl;
return;
}
string fileName = warningPath + "alarm_" + getCurrentTimeStr() + ".jpg";
cout << "imgpath = " << fileName << endl;
if (!imwrite(fileName, frame))
cerr << "图片保存失败" << endl;
}
// Write the buffered pre-record frames to an MP4 in a detached worker thread.
// The snapshot is taken by value so the caller's ring buffer can keep rolling.
// NOTE(review): the thread is detached — nothing joins it on shutdown, so a
// clip may be truncated if the process exits mid-write.
void saveAlarmVideo(deque<Mat> bufferSnapshot)
{
    if (bufferSnapshot.empty() || bufferSnapshot.front().empty())
    {
        cerr << "报警视频保存跳过: 缓冲为空" << endl;
        return;
    }
    thread([bufferSnapshot]()
    {
    // H.264 into "<dir>/alarm_<timestamp>.mp4" at the configured FPS.
    string fileName = videoPath + "alarm_" + getCurrentTimeStr() + ".mp4";
    VideoWriter write;
    int codec = write.fourcc('H', '2', '6', '4');
    Size size = bufferSnapshot.front().size();
    bool color = bufferSnapshot.front().channels() == 3;
    if (!write.open(fileName, codec, FPS, size, color))
    {
        cerr << "视频文件打开失败: " << fileName << endl;
        return;
    }
    for (auto &ii : bufferSnapshot)
    {
        if (!ii.empty())
            write.write(ii);
    }
    write.release(); })
        .detach();
}
// Return the current local time as "YYYYMMDD_HHMMSS" for use in file names.
// Fix: the original used std::localtime, which returns a pointer to shared
// static storage and is not thread-safe — this function is called from the
// alarm thread and the detached video-writer thread concurrently.  Use the
// POSIX re-entrant localtime_r with a caller-owned tm buffer instead.
std::string getCurrentTimeStr()
{
    const auto now = std::chrono::system_clock::now();
    const std::time_t tt = std::chrono::system_clock::to_time_t(now);
    std::tm tmBuf{};
    localtime_r(&tt, &tmBuf); // thread-safe replacement for localtime()
    std::stringstream ss;
    ss << std::put_time(&tmBuf, "%Y%m%d_%H%M%S");
    return ss.str();
}
// 调用报警输出程序
void setGPIOLevel(int level)
{
string cmd = "echo 'orangepi' | sudo -S /home/orangepi/RKApp/GPIOSignal/bin/sendGpioSignal " + to_string(level);
system(cmd.c_str());
}
static bool RefreshDistanceConfig()
{
ReadFile rf(filePath);
if (!rf.Open())
return false;
auto lines = rf.ReadLines();
rf.Close();
int danger = g_cfg.danger.load();
int warn = g_cfg.warn.load();
int safe = g_cfg.safe.load();
bool opm = g_cfg.outPutMode.load();
for (auto &line : lines)
{
if (line.find("NEAR_THRESHOLD=") != string::npos)
danger = stoi(line.substr(sizeof("NEAR_THRESHOLD=") - 1));
else if (line.find("MID_THRESHOLD=") != string::npos)
warn = stoi(line.substr(sizeof("MID_THRESHOLD=") - 1));
else if (line.find("MAX_DISTANCE=") != string::npos)
safe = stoi(line.substr(sizeof("MAX_DISTANCE=") - 1));
else if (line.find("outPutMode:") != string::npos)
{
// 注意:你这里的 key/value 格式看起来不像 ENV 的 KEY=VALUE确认一下
string val = line.substr(sizeof("outPutMode:"));
opm = (val == "true");
}
}
g_cfg.danger.store(danger);
g_cfg.warn.store(warn);
g_cfg.safe.store(safe);
g_cfg.outPutMode.store(opm);
return true;
}
// Start the detached alarm worker.  It waits for the newest detection set,
// decides danger via the distance threshold (polygon fallback when distance
// is 0), drives the GPIO alarm output, and saves an image/video clip on the
// alarm's rising edge.  Runs until alertWorkerRunning is cleared.
void warnThread()
{
    thread([]()
    {
    bool isAlarming = false;
    auto lastDangerTime = chrono::steady_clock::now();
    // Load the cached config once at startup.
    RefreshDistanceConfig();
    // Idle/alarm output levels; polarity depends on outPutMode.
    int normalLevel = g_cfg.outPutMode.load() ? 0 : 1;
    int alarmLevel = g_cfg.outPutMode.load() ? 1 : 0;
    setGPIOLevel(normalLevel);
    // Refresh the config at low frequency (1 s) instead of per frame.
    auto lastCfgRefresh = chrono::steady_clock::now();
    uint64_t seenSeq = latestAlertSeq.load();
    while (alertWorkerRunning.load())
    {
        // Wait for new data with a timeout so the "recover 2 s after the
        // person leaves" path still runs even if YOLO stops publishing.
        std::unique_lock<std::mutex> lk(latestAlertMutex);
        bool gotNew = latestAlertCv.wait_for(lk, std::chrono::milliseconds(50), [&] {
            return !alertWorkerRunning.load() || latestAlertSeq.load() != seenSeq;
        });
        if (!alertWorkerRunning.load())
            break;
        // Low-frequency config refresh (avoids per-iteration file IO).
        auto now = chrono::steady_clock::now();
        if (now - lastCfgRefresh >= chrono::seconds(1))
        {
            RefreshDistanceConfig();
            lastCfgRefresh = now;
            normalLevel = g_cfg.outPutMode.load() ? 0 : 1;
            alarmLevel = g_cfg.outPutMode.load() ? 1 : 0;
        }
        // Timeout with no new message: treat as "nothing detected".
        if (!gotNew)
        {
            if (isAlarming)
            {
                auto dur = chrono::duration_cast<chrono::milliseconds>(
                    chrono::steady_clock::now() - lastDangerTime).count();
                if (dur >= 2000)
                {
                    isAlarming = false;
                    setGPIOLevel(normalLevel);
                }
            }
            continue;
        }
        // New message: take the latest detection set and drop the lock.
        seenSeq = latestAlertSeq.load();
        auto detsOpt = latestAlertDets;
        lk.unlock();
        if (!detsOpt.has_value())
            continue;
        bool currentFrameHasDanger = false;
        const int dangerTh = g_cfg.danger.load();
        // Danger test: distance threshold first; when distance is reported
        // as 0, fall back to the bottom-edge-in-polygon test.
        for (const auto &d : detsOpt.value())
        {
            if (d.distance > 0.0 && d.distance <= dangerTh)
            {
                currentFrameHasDanger = true;
                break;
            }
            if (d.distance == 0.0)
            {
                if (bottomTouchesDanger(d, g_dang))
                {
                    currentFrameHasDanger = true;
                    break;
                }
            }
        }
        // State machine: danger keeps the alarm on; no danger releases it
        // only after a 2-second hold-off.
        if (currentFrameHasDanger)
        {
            lastDangerTime = chrono::steady_clock::now();
            if (!isAlarming)
            {
                isAlarming = true;
                setGPIOLevel(alarmLevel);
                // Save media on the alarm's rising edge (original logic).
                {
                    lock_guard<mutex> lk2(bufferMutex);
                    Mat framToSave;
                    deque<Mat> bufferToSave;
                    if (!videoDeque.empty())
                    {
                        framToSave = videoDeque.back().clone();
                        bufferToSave = videoDeque;
                    }
                    else if (!handleFrame.empty())
                    {
                        framToSave = handleFrame.clone();
                    }
                    if (!framToSave.empty())
                        saveAlarmImage(framToSave);
                    if (!bufferToSave.empty())
                        saveAlarmVideo(bufferToSave);
                }
            }
        }
        else
        {
            if (isAlarming)
            {
                auto dur = chrono::duration_cast<chrono::milliseconds>(
                    chrono::steady_clock::now() - lastDangerTime).count();
                if (dur >= 2000)
                {
                    isAlarming = false;
                    setGPIOLevel(normalLevel);
                }
            }
        }
    } })
        .detach();
}
// 获取报警距离
bool GetDistance()
{
// 获取距离信息
ReadFile rf(filePath);
if (rf.Open() == false)
{
cerr << "文件打开失败" << endl;
return false;
}
auto lines = rf.ReadLines();
string str;
for (auto &line : lines)
{
if (line.find("NEAR_THRESHOLD=") != string::npos)
dis.danger = stoi(line.substr(sizeof("NEAR_THRESHOLD=") - 1));
else if (line.find("MID_THRESHOLD=") != string::npos)
dis.warn = stoi(line.substr(sizeof("MID_THRESHOLD=") - 1));
else if (line.find("MAX_DISTANCE=") != string::npos)
dis.safe = stoi(line.substr(sizeof("MAX_DISTANCE=") - 1));
else if (line.find("outPutMode:") != string::npos)
{
// 确认输电平模式
string val = line.substr(sizeof("outPutMode:"));
outPutMode = (val == "true");
}
}
rf.Close();
return true;
}
// Draw one detection rectangle and its depth label on handleFrame.
// x/y/w/h may be normalized (0~1) or pixel values; `distance` selects the
// box color against the thresholds in `dis`.
// NOTE(review): GetDistance() re-reads the .env file on every call — that is
// one file read per detection per frame; consider the cached g_cfg instead.
void drawRect(double x, double y, double w, double h, double distance)
{
    // Convert normalized (0~1) or pixel values uniformly to pixels.
    auto toPixX = [&](double v) -> int
    {
        return (v <= 1.0) ? static_cast<int>(v * handleFrame.cols) : static_cast<int>(v);
    };
    auto toPixY = [&](double v) -> int
    {
        return (v <= 1.0) ? static_cast<int>(v * handleFrame.rows) : static_cast<int>(v);
    };
    int px = toPixX(x);
    int py = toPixY(y);
    int pw = toPixX(w);
    int ph = toPixY(h);
    // Clamp to the frame so the rectangle never goes out of bounds.
    px = std::max(0, std::min(px, handleFrame.cols - 1));
    py = std::max(0, std::min(py, handleFrame.rows - 1));
    pw = std::max(1, std::min(pw, handleFrame.cols - px));
    ph = std::max(1, std::min(ph, handleFrame.rows - py));
    Rect r(px, py, pw, ph);
    Scalar sca(0, 255, 0); // green by default (safe)
    if (!GetDistance())
    {
        sca = Scalar(0, 0, 0); // config unreadable: black box
    }
    else if (distance <= dis.danger)
        sca = Scalar(0, 0, 255); // red: danger
    else if (distance <= dis.warn)
        sca = Scalar(0, 255, 255); // yellow: warning
    rectangle(handleFrame, r, sca, 2);
    putText(handleFrame, to_string(distance), Point(px, py), FONT_HERSHEY_SIMPLEX, 0.35, Scalar(0, 0, 0));
}
// Connect the global MQTT client, subscribe to the detection topic, and
// start the alarm worker thread.
void MqttInit()
{
    // Register handlers before connecting so no message is missed.
    client.set_connected_handler([](const string &cause)
                                 { cout << "连接成功" << endl; });
    client.set_message_callback(getMsgCallback);
    client.connect()->wait();
    client.subscribe(Topic, Qos)->wait();
    alertWorkerRunning = true;
    // Launch the alarm worker thread.
    warnThread();
}
// MQTT message callback.  Parses the JSON detection array and publishes it
// twice: a copy for the drawing path (latestDection) and a move into the
// alarm thread's "latest only" slot (old data is simply overwritten, keeping
// latency low).  No worker thread is spawned here.
void getMsgCallback(mqtt::const_message_ptr msg)
{
    const std::string payload = msg->to_string();
    try
    {
        const auto parsed = nlohmann::json::parse(payload);
        std::vector<Dection> results;
        results.reserve(parsed.size());
        for (const auto &item : parsed)
        {
            results.push_back(Dection{
                static_cast<double>(item.value("x", 0.0)),
                static_cast<double>(item.value("y", 0.0)),
                static_cast<double>(item.value("w", 0.0)),
                static_cast<double>(item.value("h", 0.0)),
                static_cast<double>(item.value("distance", 0.0))});
        }
        // Copy for the frame-drawing path.
        {
            lock_guard<mutex> lk(detMutex);
            latestDection = results;
        }
        // Move into the alarm thread's newest-result slot and bump the seq.
        {
            std::lock_guard<std::mutex> lk(latestAlertMutex);
            latestAlertDets = std::move(results);
            latestAlertSeq.fetch_add(1, std::memory_order_relaxed);
        }
        latestAlertCv.notify_one();
    }
    catch (const nlohmann::json::parse_error &e)
    {
        cerr << "JSON 解析错误: " << e.what() << "\n原始 payload: " << payload << "\n";
    }
    catch (const std::exception &e)
    {
        cerr << "处理消息异常: " << e.what() << "\n";
    }
}
// Open /dev/video10 with the V4L2 backend (bypassing GStreamer), configure
// 640x480 @ 30 FPS with a single-frame buffer, and request MJPG.  Logs the
// negotiated settings.  Returns false when the device cannot be opened.
bool videoInit(VideoCapture &cap)
{
    // Re-open cleanly if a previous session is still active.
    if (cap.isOpened())
        cap.release();
    if (!cap.open("/dev/video10", cv::CAP_V4L2))
    {
        cerr << "摄像头打开失败:/dev/video10" << endl;
        return false;
    }
    cap.set(CAP_PROP_FRAME_WIDTH, 640);
    cap.set(CAP_PROP_FRAME_HEIGHT, 480);
    cap.set(CAP_PROP_FPS, 30);
    cap.set(CAP_PROP_BUFFERSIZE, 1);
    // Ask for MJPG; ignored by drivers that do not support it.
    cap.set(CAP_PROP_FOURCC, VideoWriter::fourcc('M', 'J', 'P', 'G'));
    // Decode the negotiated FOURCC (four packed ASCII bytes) for the log.
    const int fourcc = static_cast<int>(cap.get(CAP_PROP_FOURCC));
    char fcc[5] = {0, 0, 0, 0, 0};
    for (int i = 0; i < 4; ++i)
        fcc[i] = static_cast<char>((fourcc >> (8 * i)) & 0xFF);
    cout << "摄像头初始化成功 分辨率=" << cap.get(CAP_PROP_FRAME_WIDTH)
         << "x" << cap.get(CAP_PROP_FRAME_HEIGHT)
         << " FPS=" << cap.get(CAP_PROP_FPS)
         << " FOURCC=" << fcc << endl;
    return true;
}
// Start FFmpeg as a child process that reads raw BGR24 640x480 frames on
// stdin and pushes H.264 (rkmpp hardware encoder, low-delay flags) over RTSP
// to the local server.  Returns the writable pipe, or nullptr on failure.
FILE *pipeInit()
{
    FILE *pipe = popen(
        "ffmpeg "
        "-nostats -hide_banner -loglevel error "
        "-f rawvideo -pixel_format bgr24 -video_size 640x480 -framerate 30 -i - "
        "-c:v h264_rkmpp -rc_mode 2 -qp_init 32 -profile:v baseline -g 1 -bf 0 "
        "-fflags nobuffer -flags low_delay "
        "-rtsp_transport tcp -f rtsp rtsp://127.0.0.1:8554/stream",
        "w");
    if (!pipe)
    {
        cerr << "FFmpeg管道打开失败" << endl;
        return nullptr;
    }
    // Unbuffered so each frame reaches FFmpeg immediately.
    setvbuf(pipe, NULL, _IONBF, 0);
    cout << "FFmpeg管道初始化成功" << endl;
    return pipe;
}
// Process one frame: capture, draw detections and zones, mirror/flip, push
// to the FFmpeg pipe, and append to the pre-record ring buffer.
// Returns true to keep the main loop running (read failures just retry).
bool processFrame(VideoCapture &cap, FILE *pipe, Mat &frame, int64 &count, chrono::steady_clock::time_point &t0)
{
    // Read a frame; on failure sleep briefly and retry instead of exiting.
    if (!cap.read(frame) || frame.empty())
    {
        cerr << "读取帧失败,重试中..." << endl;
        this_thread::sleep_for(50ms);
        return true;
    }
    handleFrame = frame.clone();
    vector<Dection> destCopy;
    // Grab the latest detections under a short lock and copy them locally.
    {
        lock_guard<mutex> lk(detMutex);
        destCopy = latestDection; // local copy
        latestDection.clear();
    }
    // Draw the detection boxes on the main thread.
    for (const auto &ii : destCopy)
    {
        drawRect(ii.x, ii.y, ii.w, ii.h, ii.distance);
    }
    // Every 5 seconds, check whether the .env file changed and reload
    // zones/mirror settings if so.  (An earlier comment said 3 s; the code
    // uses 5 s.)
    static auto lastZonesRefresh = std::chrono::steady_clock::now();
    auto now = std::chrono::steady_clock::now();
    if (now - lastZonesRefresh >= std::chrono::seconds(5))
    {
        ReloadConfigIfChanged();
        lastZonesRefresh = now;
    }
    // Mirror/flip as configured, then draw the three zone polygons on the
    // outgoing picture.
    SetMirror(handleFrame);
    drawZones(handleFrame);
    // Ensure the frame pushed to FFmpeg matches the advertised BGR24 640x480.
    Mat outFrame;
    if (handleFrame.cols != 640 || handleFrame.rows != 480)
        resize(handleFrame, outFrame, Size(640, 480));
    else
        outFrame = handleFrame;
    // Write the processed picture into the FFmpeg pipe.
    fwrite(outFrame.data, 1, outFrame.total() * outFrame.elemSize(), pipe);
    fflush(pipe);
    // Append to the pre-record ring buffer under a short lock.
    {
        lock_guard<mutex> lk(bufferMutex);
        videoDeque.push_back(handleFrame.clone()); // store a deep copy
        if (videoDeque.size() > MAX_BUFFER_SIZE)
            videoDeque.pop_front();
    }
    return true;
}
// Main capture/annotate/display loop.  Pulls frames via processFrame, shows
// them fullscreen, and exits on 'q' or when mainRunning is cleared.
// Fixes vs. the original:
//  - XOpenDisplay(nullptr) can return nullptr (headless boot, SSH session);
//    the original dereferenced it unconditionally and crashed.  Now guarded,
//    with a fallback to showing the frame at its native size.
//  - The X Display connection was never closed (leak) — XCloseDisplay added.
//  - Removed the unused `windowRect` local.
void mainLoop(VideoCapture &cap, FILE *pipe)
{
    int64 count = 0;
    auto t0 = chrono::steady_clock::now();
    Mat frame;
    cout << "开始视频处理循环..." << endl;
    // Create a fullscreen window for the local preview.
    namedWindow("处理后的画面", WINDOW_NORMAL);
    setWindowProperty("处理后的画面", WND_PROP_FULLSCREEN, WINDOW_FULLSCREEN);
    // Query the screen size via X11 (guarded against a missing X server).
    int width = 0;
    int height = 0;
    if (Display *display = XOpenDisplay(nullptr))
    {
        int screen = DefaultScreen(display);
        width = DisplayWidth(display, screen);
        height = DisplayHeight(display, screen);
        XCloseDisplay(display);
    }
    else
    {
        cerr << "无法打开X显示, 使用原始帧尺寸显示" << endl;
    }
    Mat displayFrame; // scaled copy for the preview window
    while (mainRunning)
    {
        if (!processFrame(cap, pipe, frame, count, t0))
        {
            break;
        }
        // Scale handleFrame to the screen size when it is known.
        if (width > 0 && height > 0)
            resize(handleFrame, displayFrame, Size(width, height));
        else
            displayFrame = handleFrame;
        imshow("处理后的画面", displayFrame);
        // Check for the quit key.
        if (cv::waitKey(1) == 'q')
        {
            cout << "用户请求退出" << endl;
            alertWorkerRunning = false;
            break;
        }
    }
}
// Release all process resources: wake and stop the alarm worker, disconnect
// MQTT, close the FFmpeg pipe, release the camera, destroy windows.
void cleanup(FILE *pipe, VideoCapture &cap)
{
    // Defensive: make sure the alarm thread's waits are woken so it can exit.
    alertWorkerRunning = false;
    latestAlertCv.notify_all();
    alertcv.notify_all();
    try
    {
        client.disconnect()->wait();
    }
    catch (const std::exception &e)
    {
        std::cerr << e.what() << '\n';
    }
    if (pipe)
    {
        pclose(pipe);
        cout << "FFmpeg管道已关闭" << endl;
    }
    if (cap.isOpened())
    {
        cap.release();
        cout << "摄像头已释放" << endl;
    }
    destroyAllWindows();
    cout << "所有资源已清理完毕" << endl;
}