こつこつ

A memo, mostly for my own use

Implementing Optical Flow

  • Made this for a class, so writing it down as a memo
  • Uses calcOpticalFlowPyrLK
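The program below opens a video, detects corners in the first frame with goodFeaturesToTrack (refined with cornerSubPix), tracks them into each following frame with pyramidal Lucas-Kanade via calcOpticalFlowPyrLK, and draws each point's displacement as a line.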
#include <opencv2/opencv.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/core/core.hpp>
#include <opencv2/video/tracking.hpp>

using namespace cv;
using namespace std;

int main() {

    // Open the input video file
    VideoCapture capture("movie.mp4");
    if (!capture.isOpened()) {
        return -1;
    }

    // Read the first frame and keep it as the previous frame
    Mat prev;
    capture >> prev;

    // Storage for the feature points and tracking results
    vector<Point2f> prevCorners;
    vector<Point2f> currCorners;
    vector<uchar> featuresFound;
    vector<float> featuresErrors;

    // Detect the feature points to track in the first frame
    Mat prevGray;
    cvtColor(prev, prevGray, COLOR_BGR2GRAY);
    goodFeaturesToTrack(prevGray, prevCorners, 1000, 0.3, 7);
    cornerSubPix(prevGray, prevCorners, Size(10, 10), Size(-1, -1), TermCriteria(TermCriteria::COUNT | TermCriteria::EPS, 10, 0.03));

    // Loop until ESC is pressed
    while (waitKey(100) != 27) {

        // Grab the current frame; stop at the end of the video
        Mat curr;
        capture >> curr;
        if (curr.empty()) {
            break;
        }

        Mat currGray;
        cvtColor(curr, currGray, COLOR_BGR2GRAY);

        // Track each feature point from the previous frame into the current one
        calcOpticalFlowPyrLK(
            prevGray,
            currGray,
            prevCorners,
            currCorners,
            featuresFound,
            featuresErrors);
        
        for (size_t i = 0; i < featuresFound.size(); i++) {
            // Skip points that were lost or tracked with a large error
            if (featuresFound[i] == 0 || featuresErrors[i] > 550) {
                continue;
            }

            // Draw a line from the previous position to the current one
            Point p1 = Point((int)prevCorners[i].x, (int)prevCorners[i].y);
            Point p2 = Point((int)currCorners[i].x, (int)currCorners[i].y);
            line(curr, p1, p2, Scalar(255, 0, 0), 2);
        }

        // Show the result
        imshow("input", curr);

        // Carry the current frame and points over as the previous ones
        prevGray = currGray;
        prevCorners = currCorners;

    }

    return 0;
}
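
Points inevitably get lost as the video plays, and the version above carries every point over regardless of whether calcOpticalFlowPyrLK actually tracked it. Below is a minimal sketch of what could replace the prevCorners = currCorners line at the end of the while loop: keep only the points that were tracked, and re-detect corners when too few survive. Variable names follow the program above, and the threshold of 100 is an arbitrary choice, not part of the class assignment.

// Sketch only: keep just the successfully tracked points
vector<Point2f> tracked;
for (size_t i = 0; i < featuresFound.size(); i++) {
    if (featuresFound[i]) {
        tracked.push_back(currCorners[i]);
    }
}
// Re-seed with new corners when too few points remain (100 is an arbitrary threshold)
if (tracked.size() < 100) {
    goodFeaturesToTrack(currGray, tracked, 1000, 0.3, 7);
}
prevCorners = tracked;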