
About Qt: A Hands-On Vision Example, Using Qt and OpenCV with a USB Camera to Detect Moving Objects and Record Video (Frame-Difference Method)

1. Background

Recently, the snacks on my desk have been mysteriously shrinking. To catch the culprit and get my snacks replaced, I decided to use a spare USB camera to build a small tool that detects moving objects and saves video of them. The simplest approach, the frame-difference method, is used here for motion detection.

2. Motion Detection Using OpenCV's Frame-Difference Method

  • Development environment
    Qt 5.9 + OpenCV
  • Hardware
    Logitech USB camera

2.1 Introduction to the Frame-Difference Method

A moving object produces a noticeable difference between two adjacent frames. After differencing the two frames and applying some simple image processing, it is fairly easy to decide whether something has moved, similar in spirit to cut-out animation. In this example, the area of the thresholded (binarized) difference image is used to decide whether motion is present. The frame-difference method uses the previous frame as the background model for the current frame, which gives good real-time performance: the background does not accumulate errors, the model updates quickly, the algorithm is simple, and the computational cost is low. Its weakness is sensitivity to environmental noise, so the choice of threshold is critical: too low and image noise is not suppressed, too high and useful changes in the image are ignored. For large, uniformly colored moving targets, holes may appear inside the target, so the moving object cannot be extracted completely.
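
To make this concrete, the two-frame difference can be written as follows (a standard formulation added here for reference; f_k is frame k and T is the binarization threshold):

    D_k(x, y) = \lvert f_k(x, y) - f_{k-1}(x, y) \rvert
    R_k(x, y) = \begin{cases} 255, & D_k(x, y) > T \\ 0, & \text{otherwise} \end{cases}

The connected regions of the mask R_k are then filtered by area to decide whether motion has occurred.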

2.2 Frame-Difference Implementation Snippet

Convert the current frame and the previous frame to grayscale, apply a Gaussian blur to each, compute the difference image, binarize it with a suitable threshold, and finally check the area of the segmented regions. The snippet below is the core of readFrame(), which is shown in full in Section 3.

          // Core of readFrame(); these lines run when tracking is enabled (isTrack == true)
          Mat grayframePre, frameDet;
          Mat grayframeNow;
          // Frames delivered by VideoCapture are BGR, so convert with COLOR_BGR2GRAY
          cvtColor(matFrame, grayframeNow, COLOR_BGR2GRAY);
          cvtColor(framePre, grayframePre, COLOR_BGR2GRAY);
          // Smooth both frames to suppress sensor noise before differencing
          GaussianBlur(grayframeNow, grayframeNow, Size(21, 21), 0, 0);
          GaussianBlur(grayframePre, grayframePre, Size(21, 21), 0, 0);
          // Absolute difference between the current and the previous frame
          absdiff(grayframeNow, grayframePre, frameDet);
          framePre = matFrame;                   // current frame becomes the next "previous" frame
          // Binarize the difference image; 20 is the motion threshold
          threshold(frameDet, frameDet, 20, 255, THRESH_BINARY);
          // Dilate to join fragmented motion regions, then extract contours
          Mat element = getStructuringElement(MORPH_RECT, Size(3, 3));
          vector<vector<Point>> contours;
          dilate(frameDet, frameDet, element);
          findContours(frameDet, contours, RETR_TREE, CHAIN_APPROX_SIMPLE, Point());
          qDebug() << "Num" << contours.size();
          QString SavePath = "D:/ImgPath/" + QString::number(VideoNum) + "_track.avi";
          if (contours.size() == 0)
          {
              // No motion: close the current clip (if any) and move on to the next file index
              if (writer.isOpened())
              {
                  writer.release();
              }
              if (isSaveFrame)
              {
                  isSaveFrame = false;
                  VideoNum++;
              }
          }
          else
          {
              for (size_t i = 0; i < contours.size(); i++)
              {
                  double area = contourArea(contours[i]);
                  if (area < 100)                // ignore tiny regions caused by noise
                      continue;
                  qDebug() << "Motion detected!";
                  if (!isSaveFrame)
                  {
                      // Start a new MJPG recording at 10 fps
                      int fourcc = VideoWriter::fourcc('M', 'J', 'P', 'G');
                      writer.open(SavePath.toStdString(), fourcc, 10, Size(frameWidth, frameHeight), true);
                      isSaveFrame = true;
                  }
                  else
                  {
                      writer.write(matFrame);
                  }
                  break;
              }
          }
          // When tracking is disabled, readFrame() only refreshes the previous frame:
          // else { framePre = matFrame; }

3. Motion Detection with OpenCV on the Qt Platform

    widget.h

    #ifndef WIDGET_H
    #define WIDGET_H
    
    #include <QWidget>
    #include "opencv2/opencv.hpp"
    #include <QTimer>
    
    using namespace cv;
    
    namespace Ui {class Widget;}
    
    class Widget : public QWidget
    {
      Q_OBJECT
    
    public:
      explicit Widget(QWidget *parent = 0);
      ~Widget();
    
    private slots:
      void on_btnOpenVedio_clicked();
      void on_btnQuit_clicked();
      void readFrame();
    
      void on_ckb_Track_clicked(bool checked);
    
    private:
      Ui::Widget *ui;
    
      bool openCam;                     // true when the next button press should open the camera
      bool isTrack = false;             // motion tracking enabled (checkbox state)
      bool isSaveFrame = false;         // true while a motion clip is being recorded
      QTimer *timer;                    // drives frame capture via readFrame()
      VideoCapture *cap;
      Mat framePre;                     // previous frame used for the frame difference
      int fps, frameWidth, frameHeight;
      VideoWriter writer;

      int VideoNum = 0;                 // index appended to the saved video file name


      // Convert cv::Mat to QImage for display
      QImage cvMat2QImage(const cv::Mat& mat);
    };
    
    #endif // WIDGET_H

    widget.cpp

    #pragma execution_character_set("utf-8")
    #include "widget.h"
    #include "ui_widget.h"
    #include <iostream>
    #include <QDebug>
    using namespace std;
    
    Widget::Widget(QWidget *parent) :
      QWidget(parent),
      ui(new Ui::Widget)
    {
      ui->setupUi(this);
      timer = new QTimer(this);
      timer->stop();
      // Each timer tick grabs and processes one camera frame
      connect(timer, SIGNAL(timeout()), this, SLOT(readFrame()));
      openCam = true;

      cap = new VideoCapture(0);                 // open the first USB camera
      frameWidth = cap->get(CAP_PROP_FRAME_WIDTH);
      frameHeight = cap->get(CAP_PROP_FRAME_HEIGHT);
      fps = cap->get(CAP_PROP_FPS);
      qDebug() << "width" << frameWidth << frameHeight << fps;
    }
    
    Widget::~Widget()
    {
      delete ui;
    }

    void Widget::on_btnOpenVedio_clicked()
    {
      if (openCam)
      {
          ui->btnOpenVedio->setText("Close camera");
          timer->start(30);                      // poll a new frame roughly every 30 ms
      }
      else
      {
          ui->btnOpenVedio->setText("Open camera");
          timer->stop();
      }
      openCam = !openCam;
    }
    
    
    QImage Widget::cvMat2QImage(const cv::Mat &mat)
    {
      switch (mat.type())
      {
      // 8-bit  4 channel
      case CV_8UC4:
      {
          QImage image((const uchar*)mat.data, mat.cols, mat.rows, static_cast<int>(mat.step), QImage::Format_RGB32);
          return image;
      }

      // 8-bit  3 channel
      case CV_8UC3:
      {
          QImage image((const uchar*)mat.data, mat.cols, mat.rows, static_cast<int>(mat.step), QImage::Format_RGB888);
          return image.rgbSwapped();
      }

      // 8-bit  1 channel
      case CV_8UC1:
      {
          static QVector<QRgb> sColorTable;
          // only create our color table once
          if (sColorTable.isEmpty())
          {
              sColorTable.resize(256);
              for (int i = 0; i < 256; ++i)
              {
                  sColorTable[i] = qRgb(i, i, i);
              }
          }
          QImage image((const uchar*)mat.data, mat.cols, mat.rows, static_cast<int>(mat.step), QImage::Format_Indexed8);
          image.setColorTable(sColorTable);
          return image;
      }

      default:
          qDebug("Image format is not supported: depth=%d and %d channels\n", mat.depth(), mat.channels());
          qWarning() << "cvMatToQImage - cv::Mat image type not handled in switch:" << mat.type();
          break;
      }

      return QImage();
    }
    
    
    void Widget::on_btnQuit_clicked()
    {
      timer->stop();
      cap->release();
      close();
    }
    
    void Widget::readFrame()
    {
      Mat matFrame;
      cap->read(matFrame);
      if (isTrack)
      {
          Mat grayframePre, frameDet;
          Mat grayframeNow;
          // Frames delivered by VideoCapture are BGR, so convert with COLOR_BGR2GRAY
          cvtColor(matFrame, grayframeNow, COLOR_BGR2GRAY);
          cvtColor(framePre, grayframePre, COLOR_BGR2GRAY);
          GaussianBlur(grayframeNow, grayframeNow, Size(21, 21), 0, 0);
          GaussianBlur(grayframePre, grayframePre, Size(21, 21), 0, 0);
          absdiff(grayframeNow, grayframePre, frameDet);
          framePre = matFrame;
          threshold(frameDet, frameDet, 20, 255, THRESH_BINARY);
          Mat element = getStructuringElement(MORPH_RECT, Size(3, 3));
          vector<vector<Point>> contours;
          dilate(frameDet, frameDet, element);
          findContours(frameDet, contours, RETR_TREE, CHAIN_APPROX_SIMPLE, Point());
          qDebug() << "Num" << contours.size();
          QString SavePath = "D:/ImgPath/" + QString::number(VideoNum) + "_track.avi";
          if (contours.size() == 0)
          {
              // No motion: close the current clip and advance the file index
              if (writer.isOpened())
              {
                  writer.release();
              }
              if (isSaveFrame)
              {
                  isSaveFrame = false;
                  VideoNum++;
              }
          }
          else
          {
              for (size_t i = 0; i < contours.size(); i++)
              {
                  double area = contourArea(contours[i]);
                  if (area < 100)                // ignore tiny regions caused by noise
                      continue;
                  qDebug() << "Motion detected!";
                  if (!isSaveFrame)
                  {
                      // Start a new MJPG recording at 10 fps
                      int fourcc = VideoWriter::fourcc('M', 'J', 'P', 'G');
                      writer.open(SavePath.toStdString(), fourcc, 10, Size(frameWidth, frameHeight), true);
                      isSaveFrame = true;
                  }
                  else
                  {
                      writer.write(matFrame);
                  }
                  break;
              }
          }
      }
      else
      {
          framePre = matFrame;                   // keep the previous frame fresh while not tracking
      }
      QImage Qimg = cvMat2QImage(matFrame);
      ui->picshow->setPixmap(QPixmap::fromImage(Qimg));
    }
    
    void Widget::on_ckb_Track_clicked(bool checked)
    {
      isTrack = checked;
    }
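
main.cpp is not shown in the original post; as an assumption, a minimal Qt entry point for this project would look like the sketch below.

    // main.cpp -- hypothetical entry point, not part of the original post
    #include <QApplication>
    #include "widget.h"

    int main(int argc, char *argv[])
    {
        QApplication a(argc, argv);
        Widget w;
        w.show();
        return a.exec();
    }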

4. UI Results


After the camera is opened, frames are captured and displayed continuously. When "Enable tracking" is checked, the program runs the frame-difference algorithm to decide whether anything is moving; if motion is detected, video of the motion is saved.

5. Summary

The two-frame difference is a fairly basic way to detect moving objects. It is fast, but it cannot filter out interference from lighting changes or slight camera shake, and moving targets tend to show "ghosting" that leaves holes inside the detected region. The three-frame difference method improves on the adjacent-frame difference and, to some extent, reduces the doubled edges and coarse contours around moving objects. By comparison, it is better suited to fast-moving objects, for example intelligent monitoring of vehicles on a road; a sketch of the three-frame difference is given below.
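
As a rough illustration of the three-frame difference mentioned above (a sketch under the same OpenCV setup, not code from the original post; the function name and parameters are made up for this example):

    #include <opencv2/opencv.hpp>
    using namespace cv;

    // Hypothetical helper: combines two consecutive frame differences with a
    // logical AND, which suppresses the "ghosting" left by the two-frame method.
    Mat threeFrameDiff(const Mat &prev2, const Mat &prev1, const Mat &curr, double thresh = 20)
    {
        Mat g0, g1, g2, d01, d12, mask;
        cvtColor(prev2, g0, COLOR_BGR2GRAY);
        cvtColor(prev1, g1, COLOR_BGR2GRAY);
        cvtColor(curr,  g2, COLOR_BGR2GRAY);
        GaussianBlur(g0, g0, Size(21, 21), 0);
        GaussianBlur(g1, g1, Size(21, 21), 0);
        GaussianBlur(g2, g2, Size(21, 21), 0);
        absdiff(g1, g0, d01);                          // difference of frames k-2 and k-1
        absdiff(g2, g1, d12);                          // difference of frames k-1 and k
        threshold(d01, d01, thresh, 255, THRESH_BINARY);
        threshold(d12, d12, thresh, 255, THRESH_BINARY);
        bitwise_and(d01, d12, mask);                   // motion must appear in both differences
        return mask;
    }

The resulting mask can then be dilated and passed to findContours() exactly as in readFrame() above.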
