admin
2025-04-27 9280d221d473730e81738628d1b247131f500a64
PreProcessFn.cpp
@@ -9,7 +9,7 @@
#include <mysql/mysql.h> 
#include <future>
#include "iostream"
#include <hiredis.h>
#include <string>
#include <locale>
#include <codecvt>
@@ -332,6 +332,38 @@
    return size * nmemb;
}
string join(const vector<string>& sequence, const string& separator)
{
   std::string result;
   for (size_t i = 0; i < sequence.size(); ++i)
      result += sequence[i] + ((i != sequence.size() - 1) ? separator : "");
   return result;
}
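// Example (illustrative only): join({"-f", "flv"}, " ") returns "-f flv";
// an empty vector returns an empty string.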
/// <summary>
/// Writes data to Redis
/// </summary>
void fnWriteToRedis(redisContext*& context, string ipccode)
{
   redisReply* reply;
   //only write the alarm to Redis when the connection is healthy
   if (context != NULL && !context->err) {
      // when an alarm is recorded, also write it to the Redis database
      std::string key = "camera::run::" + ipccode;
      std::string value = "1";
      //write to Redis
      reply = (redisReply*)redisCommand(context, "SET %s %s", key.c_str(), value.c_str());
      if (reply == NULL) {
         printf("Failed to write data to Redis\n");
         redisFree(context);
         context = nullptr;//the caller holds the pointer by reference; avoid leaving it dangling
         return;
      }
      freeReplyObject(reply);
   }
}
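// A possible hardening, not part of this change: give the key a TTL so a stale
// "camera::run" flag expires on its own if the pipeline stops. The 30-second
// expiry below is only an illustrative value:
//    reply = (redisReply*)redisCommand(context, "SET %s %s EX %d",
//                                      key.c_str(), value.c_str(), 30);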
/// @brief Pulls and processes the video stream
/// @param _rtspUrl source address of the video stream
/// @param queJC queue of unprocessed video-stream frames
@@ -342,7 +374,7 @@
   int count = 0;
   long currentFrame = 1;
   long skip_frame_num = skipN;//frame-skip interval (every 1 frame)
   while (!asyncStop)
   {
@@ -455,6 +487,64 @@
   rtspStream.release();   
}
/// <summary>
/// Connects to the Redis server and verifies the password (verification currently commented out)
/// </summary>
/// <param name="server">Redis host</param>
/// <param name="port">Redis port</param>
/// <param name="password">Redis password (unused while AUTH is disabled)</param>
/// <param name="context">receives the connection context</param>
/// <returns>true on success</returns>
bool connectToRedis(const char* server, int port, const char* password, redisContext*& context) {
   // Connect to the Redis server
   context = redisConnect(server, port);
   if (context == nullptr || context->err) {
      std::cerr << "Failed to connect to the Redis server!" << std::endl;
      if (context) {
         redisFree(context);// per the hiredis docs the context must still be freed on a connection error
         context = nullptr;
      }
      return false;
   }
   //// Validate the password
   //redisReply* reply = (redisReply*)redisCommand(context, "AUTH %s", password);
   //if (reply == NULL) {
   //   std::cerr << "Connection authentication failed!" << std::endl;
   //   redisFree(context);
   //   return false;
   //}
   //else {
   //   // Inspect the reply to determine whether authentication succeeded
   //   if (reply->type == REDIS_REPLY_ERROR && strcmp(reply->str, "OK") != 0) {
   //      std::cerr << "Wrong password: " << reply->str << std::endl;
   //      freeReplyObject(reply);
   //      redisFree(context);
   //      return false;
   //   }
   //   freeReplyObject(reply);
   //}
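   // If AUTH is re-enabled, a minimal sketch (assuming the server actually requires a
   // password) could look like the following; checking reply->type against
   // REDIS_REPLY_ERROR is enough, the extra strcmp against "OK" above is redundant:
   //    redisReply* reply = (redisReply*)redisCommand(context, "AUTH %s", password);
   //    if (reply == nullptr || reply->type == REDIS_REPLY_ERROR) {
   //       std::cerr << "Redis authentication failed" << std::endl;
   //       if (reply) freeReplyObject(reply);
   //       redisFree(context);
   //       context = nullptr;
   //       return false;
   //    }
   //    freeReplyObject(reply);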
   return true;
}
std::string jsontostr(Json::Value& json)
{
   string return_str;
   if(!json.isNull()&&!json.empty())
   {
      for (Json::ArrayIndex i = 0; i < json.size(); i++)
      {
         return_str += json[i].asString()+",";
      }
      if (!return_str.empty() && return_str.back() == ',') {
         return_str.pop_back();
      }
   }
   else
   {
      return_str = "";
   }
   return return_str;
}
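// Example (illustrative only): for a JSON array ["1","5","7"], jsontostr returns
// "1,5,7"; for null or an empty array it returns "".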
/// @brief Video-frame processing function
/// @param ipcsNode camera node
/// @param modelArray model list
@@ -462,10 +552,11 @@
/// @param queJC2 queue of processed video-stream frames
void PreProcessFn::fnImRecognitionPro(const Json::Value& ipcsNode, Json::Value modelArray, queue<Mat>& queJC, queue<Mat>& queJC2)
{
   std::string ipccode = ipcsNode["ipccode"].asString();//摄像机编码
   std::string ipccode = ipcsNode["code"].asString();//摄像机编码
   std::string ipcname = ipcsNode["name"].asString();//摄像机名称
   std::string ipcrtsppath = ipcsNode["video"].asString();//视频rtsp源   
   // std::string skipN = ipcsNode["skipN"].asString();//跳帧
   // int skipN = ipcsNode["skipN"].asString();//跳帧
   //initialize the MySQL connection
   MYSQL* mysql = mysql_init(nullptr);
   if (!mysql_real_connect(mysql, mysqlpath.c_str(), mysqluser.c_str(), mysqlpass.c_str(), mysqldatabase.c_str(), stoi(mysqlport), NULL, 0))
@@ -480,6 +571,18 @@
      mysql_set_character_set(mysql, "utf8mb4");
      //mysql_query(mysql, "SET NAMES GB2312");//fix garbled Chinese characters       
   }   
   //initialize the Redis connection
   redisContext* context = nullptr;
   if (!connectToRedis(redispath.c_str(), stoi(redisport), redispass.c_str(), context))
   {
      cout << "Redis connection initialization failed!" << endl;
   }
   else
   {
      cout << "Redis connection initialized successfully!" << endl;
   }
   std::string limitDM = "1";//coal-pile limit threshold; an alarm is raised when exceeded; assigned from the config file and different for each camera
@@ -497,6 +600,14 @@
   std::vector<Point>  areaDM;//coal-pile detection area   
   std::vector<Point>  areaWear;//PPE (wear) detection area
   std::vector<Point>  areaFire;//smoke and fire detection area
   std::vector<Point> detectarea;//detection area
   std::vector<Point> flowarea;//belt outer area
   std::vector<Point> flowrelate;//belt reference area
   std::vector<Point> leftarea;//left-side area
   std::vector<Point> rightarea;//right-side area
   std::vector<Point> beltarea;//middle (belt) area
   std::vector<Point> workarea;//work area
   //model handling ------ step 2
   RUN rBelt;//belt running model
@@ -626,152 +737,201 @@
   //the hoist and belt checks need three accumulated frames before a decision can be made
   std::vector<cv::Mat> imagesTsj;
   std::vector<cv::Mat> imagesBelt;
   std::string color_result;
   std::string lable_title;
   
   cout << modelArray.size() << endl;
   //loop over each camera's models and load each one
   for (int j = 0; j < modelArray.size(); ++j) {
      Json::Value modelNode = modelArray[j];
      std::string modelPath = modelNode["modelpath"].asString();
      std::string modelPath2 = modelNode["modelpath2"].asString();//模型2路径
      std::string imagePath = modelNode["imagepath"].asString();
      std::string modelCode = modelNode["code"].asString();//模型编码 区分加载不同的模型
      std::string modelPath = modelNode["path"].asString();
      std::string modelPath2 = modelNode["path2"].asString();//模型2路径
      std::string imagePath = modelNode["img_path"].asString();
      std::string modelName = modelNode["name"].asString();
      std::string modelType = modelNode["type"].asString();//模型分析类型编码
      std::string modelAnalysis = modelNode["analysis"].asString();//模型分析结果编码
      std::string leftarea = modelNode.get("leftarea", "").asString();//皮带左侧区域
      std::string rightarea = modelNode.get("rightarea", "").asString();//皮带右侧区域
      std::string area = modelNode.get("area", "").asString();//区域一
      std::string area2 = modelNode.get("area2", "").asString();//区域二
      std::string modelLimit = modelNode["limit"].asString();
      std::string modelLimit2 = modelNode.get("limit2", "").asString();//皮带空载限定值
      std::string modelId = modelNode["modelid"].asString();//模型id,区分加载不同的模型
      Json::Value modelTypeArr = modelNode["type"];////模型分析类型编码
      std::string modelType = jsontostr(modelTypeArr);
      double threshold = stod(modelNode["threshold"].asString());//阈值
      float modelLimit = stof(modelNode["limit"].asString());//限定值
      std::string modelAnalysis ="";
      color_result = modelNode["color_result"].asString();//颜色反正值
      // std::string area = modelNode["threshold"].asString();//区域一
      // std::string area2 = modelNode.get("area2", "").asString();//区域二
      Json::Value rects = modelNode["point_rects"];////模型分析类型编码
      for (auto k = 0; k < rects.size(); ++k)
         {
            Json::Value pointNode = rects[k];
            lable_title=pointNode["title"].asString();
            std::string point_type=pointNode["type"].asString();
            Json::Value point_Arr= pointNode["points"];
            for (auto x = 0; x < point_Arr.size(); ++x)
            {
               int croodx = stoi(point_Arr[x]["x"].asString());
               int croody = stoi(point_Arr[x]["y"].asString());
               if(point_type=="detectarea")
               {
                  detectarea.push_back(cv::Point(croodx, croody));
               }
               if (point_type=="flowarea")
               {
                  flowarea.push_back(cv::Point(croodx, croody));
               }
               if (point_type=="flowrelate")
               {
                  flowrelate.push_back(cv::Point(croodx, croody));
               }
               if (point_type=="leftroller")
               {
                  leftarea.push_back(cv::Point(croodx, croody));
               }
               if (point_type=="rightroller")
               {
                  rightarea.push_back(cv::Point(croodx, croody));
               }
               if (point_type=="beltarea")
               {
                  beltarea.push_back(cv::Point(croodx, croody));
               }
               if (point_type=="workarea")
               {
                  workarea.push_back(cv::Point(croodx, croody));
               }
            }
         }
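      // For reference, the point_rects structure consumed above is assumed to look
      // roughly like this (field names taken from the parsing code, values illustrative):
      //   "point_rects": [
      //     { "title": "belt area", "type": "detectarea",
      //       "points": [ { "x": "100", "y": "200" }, { "x": "300", "y": "220" } ] }
      //   ]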
      int* arr = new int[detectarea.size() * 2];
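      // The change never populates arr before handing it to initConfig; it is assumed
      // here that the models expect the detectarea polygon flattened into x,y pairs,
      // so fill it explicitly:
      for (size_t p = 0; p < detectarea.size(); ++p) {
         arr[2 * p] = detectarea[p].x;
         arr[2 * p + 1] = detectarea[p].y;
      }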
      //load and configure the models ------
      if(modelCode=="1") //belt status
      {
         if (!rBelt.initConfig(modelPath.c_str(), arr))
         {
            printf("Belt running model initialization failed.\n");
         }
         else
         {
            printf("Belt running model initialized successfully.\n");
            rBelt.isLoad = true;
         }
         areaBelt = detectarea;
         videoPathBelt = coalCode + ipccode + modelType + modelAnalysis;
         wfBelt.setCoalCode(coalCode);
         wfBelt.setCameraCode(ipccode);
         wfBelt.setCameraName(ipcname);
         wfBelt.setRtspUrl(ipcrtsppath);
         wfBelt.setAnalyse(modelType);
         //wfBelt.setAnalyseResult(modelAnalysis);
      }
      if(modelCode=="2") //提升机运行检测模型
      {
         if (!rTSJ.initConfig(modelPath.c_str(), arr))
            {
               printf("提升机模型初始化失败。\n");
            }
            else
            {
               printf("提升机模型初始化成功。\n");
               rTSJ.isLoad = true;
            }
            areaTSJ = detectarea;
            videoPathTSJ = coalCode + ipccode + modelType;
            wfTSJ.setCoalCode(coalCode);
            wfTSJ.setCameraCode(ipccode);
            wfTSJ.setCameraName(ipcname);
            wfTSJ.setRtspUrl(ipcrtsppath);
            wfTSJ.setAnalyse(modelType);
            wfTSJ.setAnalyseResult(modelAnalysis);
            break;
      }
      if(modelCode=="3") //人员区域闯入模型
      {
         if (!hatJC.initConfig(modelPath.c_str(), 0.5, 0.5))
         {
            printf("目标检测模型初始化失败\n");
         }
         else
         {
            printf("目标检测模型初始化成功。\n");
            hatJC.isLoad = true;
         }
         areaPerson = stringToPoints(area);//危险区域
         workareaPerson = stringToPoints(area2);//工作区域
         videoPathPerson = coalCode + ipccode + modelType + modelAnalysis;
         wfPerson.setCoalCode(coalCode);
         wfPerson.setCameraCode(ipccode);
         wfPerson.setCameraName(ipcname);
         wfPerson.setRtspUrl(ipcrtsppath);
         wfPerson.setAnalyse(modelType);
         wfPerson.setAnalyseResult(modelAnalysis);
         break;
      case 4://摄像头遮挡算法
            {
               printf("目标检测模型初始化失败\n");
            }
            else
            {
               printf("目标检测模型初始化成功。\n");
               hatJC.isLoad = true;
            }
            areaPerson = detectarea;//危险区域
            workareaPerson = workarea;//工作区域
            videoPathPerson = coalCode + ipccode + modelType + modelAnalysis;
            wfPerson.setCoalCode(coalCode);
            wfPerson.setCameraCode(ipccode);
            wfPerson.setCameraName(ipcname);
            wfPerson.setRtspUrl(ipcrtsppath);
            wfPerson.setAnalyse(modelType);
            wfPerson.setAnalyseResult(modelAnalysis);
      }
      if(modelCode=="4") //摄像头遮挡算法
      {
         if (!cover.initConfig()) {
            printf("摄像头遮挡算法初始化失败\n");
         }
         else
         {
            std::cout << "摄像头遮挡算法初始化成功" << std::endl;
            cover.isLoad = true;
         }
         videoPathCameraCover = coalCode + ipccode + modelType + modelAnalysis;
         wfCameraCover.setCoalCode(coalCode);
         wfCameraCover.setCameraCode(ipccode);
         wfCameraCover.setCameraName(ipcname);
         wfCameraCover.setRtspUrl(ipcrtsppath);
         wfCameraCover.setAnalyse(modelType);
         wfCameraCover.setAnalyseResult(modelAnalysis);
         break;
      case 5://摄像头移动算法
               printf("摄像头遮挡算法初始化失败\n");
            }
            else
            {
               std::cout << "摄像头遮挡算法初始化成功" << std::endl;
               cover.isLoad = true;
            }
            videoPathCameraCover = coalCode + ipccode + modelType + modelAnalysis;
            wfCameraCover.setCoalCode(coalCode);
            wfCameraCover.setCameraCode(ipccode);
            wfCameraCover.setCameraName(ipcname);
            wfCameraCover.setRtspUrl(ipcrtsppath);
            wfCameraCover.setAnalyse(modelType);
            wfCameraCover.setAnalyseResult(modelAnalysis);
      }
      if(modelCode=="5") //摄像头移动算法
      {
         if (!camera.initConfig()) {
            printf("摄像头移动算法初始化失败\n");
         }
         else
         {
            std::cout << "摄像头移动算法初始化成功" << std::endl;
            camera.isLoad = true;
            camera.stdMat = cv::imread(imagePath);
         }
         videoPathCameraMove = coalCode + ipccode + modelType + modelAnalysis;
         wfCameraMove.setCoalCode(coalCode);
         wfCameraMove.setCameraCode(ipccode);
         wfCameraMove.setCameraName(ipcname);
         wfCameraMove.setRtspUrl(ipcrtsppath);
         wfCameraMove.setAnalyse(modelType);
         wfCameraMove.setAnalyseResult(modelAnalysis);
         break;
               printf("摄像头移动算法初始化失败\n");
            }
            else
            {
               std::cout << "摄像头移动算法初始化成功" << std::endl;
               camera.isLoad = true;
               camera.stdMat = cv::imread(imagePath);
            }
            videoPathCameraMove = coalCode + ipccode + modelType + modelAnalysis;
            wfCameraMove.setCoalCode(coalCode);
            wfCameraMove.setCameraCode(ipccode);
            wfCameraMove.setCameraName(ipcname);
            wfCameraMove.setRtspUrl(ipcrtsppath);
            wfCameraMove.setAnalyse(modelType);
            wfCameraMove.setAnalyseResult(modelAnalysis);
      }
      if(modelCode=="6") //堆煤检测模型
      {
         if (!dm.initConfig(modelPath.c_str(), arr)) {
               printf("堆煤模型初始化失败\n");
            }
            else
            {
               std::cout << "堆煤模型初始化成功" << std::endl;
               dm.isLoad = true;
            }
            videoPathDM = coalCode + ipccode + modelType + modelAnalysis;
            areaDM = detectarea;
            limitDM = modelLimit;//堆煤限定煤量占比
      case 6://堆煤检测模型
         if (!dm.initConfig(modelPath.c_str(), stringToIntArray(area))) {
            printf("堆煤模型初始化失败\n");
         }
         else
         {
            std::cout << "堆煤模型初始化成功" << std::endl;
            dm.isLoad = true;
         }
         videoPathDM = coalCode + ipccode + modelType + modelAnalysis;
         areaDM = stringToPoints(area2);
         limitDM = modelLimit;//堆煤限定煤量占比
            wfDM.setCoalCode(coalCode);
            wfDM.setCameraCode(ipccode);
            wfDM.setCameraName(ipcname);
            wfDM.setRtspUrl(ipcrtsppath);
            wfDM.setAnalyse(modelType);
            wfDM.setAnalyseResult(modelAnalysis);
      }
      if(modelCode=="7") //皮带跑偏和异物检测
      {
         int* leftarr = new int[leftarea.size() * 2];
         int* rightarr = new int[rightarea.size() * 2];
         int* midd_arr = new int[beltarea.size() * 2];
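         // As with arr above, these buffers are never filled in the original change; it is
         // assumed the belt model expects each polygon flattened into x,y pairs:
         for (size_t p = 0; p < leftarea.size(); ++p)  { leftarr[2 * p] = leftarea[p].x;   leftarr[2 * p + 1] = leftarea[p].y; }
         for (size_t p = 0; p < rightarea.size(); ++p) { rightarr[2 * p] = rightarea[p].x; rightarr[2 * p + 1] = rightarea[p].y; }
         for (size_t p = 0; p < beltarea.size(); ++p)  { midd_arr[2 * p] = beltarea[p].x;  midd_arr[2 * p + 1] = beltarea[p].y; }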
         if (!beltJC.initConfig(modelPath.c_str(), leftarr, rightarr, midd_arr, modelLimit))
         {
            printf("皮带跑偏和异物检测模型初始化失败\n");
         }
@@ -780,9 +940,9 @@
            printf("皮带跑偏和异物检测模型初始化成功。\n");
            beltJC.isLoad = true;
         }
         leftareaBeltJC = leftarea;
         rightareaBeltJC = rightarea;
         areaBeltJC = beltarea;
         videoPathBeltJC = coalCode + ipccode + modelType + modelAnalysis;
         wfBeltJC.setCoalCode(coalCode);
         wfBeltJC.setCameraCode(ipccode);
@@ -790,8 +950,9 @@
         wfBeltJC.setRtspUrl(ipcrtsppath);
         wfBeltJC.setAnalyse(modelType);
         wfBeltJC.setAnalyseResult(modelAnalysis);
      }
      if(modelCode=="8") //large objects carried on the man-rider (monkey car)
      {
         if (!hatHC.initConfig(modelPath.c_str(), 0.5, 0.5))
         {
            printf("乘猴车携带大件模型初始化失败\n");
@@ -801,7 +962,7 @@
            printf("乘猴车携带大件模型初始化成功。\n");
            hatHC.isLoad = true;
         }
         areaHC = detectarea;
         videoPathHC = coalCode + ipccode + modelType + modelAnalysis;
         wfHC.setCoalCode(coalCode);
         wfHC.setCameraCode(ipccode);
@@ -809,9 +970,10 @@
         wfHC.setRtspUrl(ipcrtsppath);
         wfHC.setAnalyse(modelType);
         wfHC.setAnalyseResult(modelAnalysis);
      }
      if (modelCode == "9")//sleeping-on-duty detection
      {
         if (!hatSleep.initConfig(modelPath.c_str(), 0.5, 0.5))
         {
            printf("睡岗模型初始化失败\n");
@@ -821,7 +983,7 @@
            printf("睡岗模型初始化成功。\n");
            hatSleep.isLoad = true;
         }
         areaSleep = detectarea;
         videoPathSleep = coalCode + ipccode + modelType + modelAnalysis;
         wfSleep.setCoalCode(coalCode);
         wfSleep.setCameraCode(ipccode);
@@ -829,9 +991,9 @@
         wfSleep.setRtspUrl(ipcrtsppath);
         wfSleep.setAnalyse(modelType);
         wfSleep.setAnalyseResult(modelAnalysis);
      }
      if (modelCode == "10")//PPE (wear) detection, initializes two models
      {
         if (!hatWear.initConfig(modelPath.c_str(), 0.5, 0.5))
         {
            printf("穿戴模型1初始化失败\n");
@@ -849,7 +1011,7 @@
            }
            hatWear.isLoad = true;
         }
         areaWear = detectarea;
         videoPathWear = coalCode + ipccode + modelType + modelAnalysis;
         wfWear.setCoalCode(coalCode);
         wfWear.setCameraCode(ipccode);
@@ -857,10 +1019,9 @@
         wfWear.setRtspUrl(ipcrtsppath);
         wfWear.setAnalyse(modelType);
         wfWear.setAnalyseResult(modelAnalysis);
      }
      if (modelCode == "11")//smoke and fire model
      {
         if (!hatFire.initConfig(modelPath.c_str())) {
            printf("烟火模型初始化失败\n");
         }
@@ -869,7 +1030,7 @@
            printf("烟火模型初始化成功。\n");
            hatFire.isLoad = true;
         }
         areaFire = detectarea;
         videoPathFire = coalCode + ipccode + modelType + modelAnalysis;
         wfFire.setCoalCode(coalCode);
         wfFire.setCameraCode(ipccode);
@@ -877,9 +1038,10 @@
         wfFire.setRtspUrl(ipcrtsppath);
         wfFire.setAnalyse(modelType);
         wfFire.setAnalyseResult(modelAnalysis);
      }
      if (modelCode == "12")//person fall detection
      {
      }
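      // Note, not part of the original change: arr is allocated with new[] on every
      // iteration of this loop and never released. If the models copy the coordinates
      // rather than keep the pointer, a std::vector<int> (or a delete[] at this point)
      // would avoid the leak; the same applies to leftarr/rightarr/midd_arr above.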
   }   
@@ -891,7 +1053,7 @@
   Mat iMatWear;//first frame for the PPE (wear) check
   long currentFrame = 1;//current frame count
   long skip_frame_num = skipN;//frame-skip interval
   Mat frame;
   while (!asyncStop) 
   {      
@@ -930,7 +1092,7 @@
            //personnel-intrusion model processing and decision (modelCode 3)
            if (hatJC.isLoad)
            {
               futurePerson = std::async(std::launch::async, &PreProcessModel::fnImRecProByModelHAT1, &ppm, std::ref(frame), std::ref(hatJC), areaPerson,workareaPerson,c_list,lable_title,color_result);
            }
            //dedicated handling for camera movement and occlusion
@@ -1629,7 +1791,9 @@
            currentFrame = 0;
         }
         currentFrame++;
         fnWriteToRedis(context,ipccode);
      }
      catch (const std::exception& ex)
      {         
@@ -1659,14 +1823,14 @@
   Mat frame;
   long currentFrame = 1;
   long skip_frame_num = skipN;//frame-skip interval (every 1 frame)
   //create a URL connection; periodically check whether any page is pulling the stream
   CURL* curl;
   CURLcode res;
   std::string readBuffer;
   curl = curl_easy_init();
   bool pushflg = true;//flag: whether to push the stream
 
   if(curl)
   {
@@ -1745,17 +1909,7 @@
            //cv::resize(frame, frame, Size(1280, 720));
            imgStr = Mat2Base64(frame, ".jpg");//output the Base64-encoded string            
            //std::cout << "Base64 size: " << imgStr.size() << std::endl;
            // // Encode to JPEG-format binary data
            // std::vector<uchar> encoded;
            // std::vector<int> params = {cv::IMWRITE_JPEG_QUALITY, 90};
            // cv::imencode(".jpg", frame, encoded, params);
            // imgStr = std::string(reinterpret_cast<const char*>(encoded.data()), encoded.size());
            // std::cout << "Binary size: " << imgStr.size() << std::endl;
            //cout << getCurrentDateTime("%Y%m%d%H%M%S") << endl;
            //std::cout << "Base64 size: " << imgStr.size() << std::endl;
            objRabbitmq.Publish(imgStr, ipccode, "");
            
            //cv::resize(frame, frame, Size(1280, 720));      
@@ -1796,6 +1950,37 @@
   //define the receiving frame
   Mat frame;
   vector<string> arguments = {
      "ffmpeg",
      "-hwaccel","cuvid",
      "-hwaccel_output_format","cuda",
      "-y", "-an",
      "-f", "rawvideo",
      "-vcodec", "rawvideo",
      "-pix_fmt", "bgr24",
      "-s",  "640x480",
      "-r", "15",
      "-i", "-",
      "-pix_fmt", "yuv420p",
      "-f", "flv",
      "-max_delay", "1000",
      "-flvflags", "no_duration_filesize",
      "-c:v","h264_nvenc",
      "-b:v", "3M",
      "-g:v", "15",
      "-bf", "0",
      "-bufsize", "50000000",
      "-rtbufsize", "50000000",
      "rtmp://192.168.1.8:1935/live/camera1" };
   string ffmpeg_command = join(arguments, " ");
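   // With the arguments above, ffmpeg_command expands to a single command line of the
   // form "ffmpeg -hwaccel cuvid ... -i - ... rtmp://192.168.1.8:1935/live/camera1";
   // raw BGR frames are then written to that process's stdin through the pipe below.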
   // Launch the FFmpeg process
   FILE* pipe = popen(ffmpeg_command.c_str(), "w");
   if (!pipe) {
      std::cerr << "Unable to start FFmpeg" << std::endl;
      return;//without a pipe there is nothing to stream to
   }
   while (!asyncStop) {
      //std::cout << ipccode + " current thread RabbitMQ data count 2222: " << queJC2.size() << std::endl;      
      try
@@ -1814,20 +1999,92 @@
         queJC2.pop();
         if (frame.empty())//discard empty frames
            continue;
         // Write the frame into the FFmpeg pipe
         fwrite(frame.data, 1, frame.total() * frame.elemSize(), pipe);
         //cv::resize(frame, frame, Size(1280, 720));
         //cv::imshow(ipccode, frame);//display the frame
         //waitKey(10);
         //std::this_thread::sleep_for(std::chrono::milliseconds(10));// wait for a short while
      }
      catch (const std::exception& ex)
      {
         std::string errorMessage = "Failed to save to RabbitMQ:-";
         errorMessage += ex.what();
         cout << errorMessage << endl;
         continue;
      }
   }
   frame.release();
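   // Not in the original change: close the FFmpeg pipe once the loop exits so the
   // child process can flush and terminate cleanly.
   if (pipe) pclose(pipe);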
}
/// @brief Pushes processed frames to the streaming media server
/// @param queJC2 queue of processed video-stream frames
/// @param toRtsp target stream URL to push to
/// @param fps output frame rate
/// @param ipccode camera code
void PreProcessFn::fnPushVideoToUrl(queue<Mat>& queJC2,string toRtsp, string fps, string ipccode)
{
   //define the receiving frame
   Mat frame;
   vector<string> arguments = {
      "ffmpeg "
      "-hwaccel","cuvid",
      "-hwaccel_output_format","cuda",
      "-y", "-an",
      "-f", "rawvideo",
      "-vcodec", "rawvideo",
      "-pix_fmt", "bgr24",
      "-s",  "1280x720",
      "-r", fps,
      "-i", "-",
      "-pix_fmt", "yuv420p",
      "-f", "flv",
      "-max_delay", "1000",
      "-flvflags", "no_duration_filesize",
      "-c:v","h264_nvenc",
      "-b:v", "3M",
      "-g:v", "15",
      "-bf", "0",
      "-bufsize", "50000000",
      "-rtbufsize", "50000000",
      toRtsp };
   string ffmpeg_command = join(arguments, " ");
   // Launch the FFmpeg process
   FILE* pipe = popen(ffmpeg_command.c_str(), "w");
   if (!pipe) {
      std::cerr << "Unable to start FFmpeg" << std::endl;
      return;//without a pipe there is nothing to stream to
   }
   while (!asyncStop) {
      //std::cout << ipccode + " current thread RabbitMQ data count 2222: " << queJC2.size() << std::endl;
      try
      {
         if (queJC2.size() > 500)
         {
            cout << "推送视频时,存在内存溢出风险!" << endl;
         }
         if (queJC2.empty() || queJC2.size() == 0)
         {
            continue;
         }
         frame = queJC2.front();
         queJC2.pop();
         if (frame.empty())//discard empty frames
            continue;
         // Write the frame into the FFmpeg pipe
         fwrite(frame.data, 1, frame.total() * frame.elemSize(), pipe);