OpenCV and Processing 16

This example continues from the last post to compute the optical flow between two greyscale images by using the calcOpticalFlowPyrLK() function in the Video module. The new positions of the tracked pixels are delivered in a MatOfPoint2f object. By using the last and current positions of the feature points, we can plot the paths of the pixel movements. Furthermore, we can use such information for interactive or generative drawings, as seen in my artwork, Movement in Time.


import processing.video.*;
 
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfByte;
import org.opencv.core.MatOfFloat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;
import org.opencv.core.Size;
import org.opencv.core.TermCriteria;
import org.opencv.imgproc.Imgproc;
 
final int MAX_COUNT = 200; // maximum number of feature points to detect/track
Capture cap;               // live camera capture source
CVImage img;               // OpenCV-backed wrapper around the current frame
PImage lastImg;            // copy of the previous frame
TermCriteria term;         // stop criteria shared by cornerSubPix and the LK tracker
Size subPixWinSize, winSize; // search windows: sub-pixel refinement / optical flow
Mat grey, lastGrey;        // greyscale Mats of the current and previous frames
MatOfPoint2f points, lastPoints; // current and previous positions of tracked points
float delta1, delta2;      // motion thresholds; NOTE(review): delta1 is assigned in setup() but not read in this chunk — confirm intended use
boolean init;              // when true, draw() re-detects features on its next pass
 
// Initialise the sketch: load the native OpenCV library, start the camera,
// and allocate all Mats/parameters used by the optical-flow tracker.
// Statement order matters: the native library must be loaded before any Mat
// is created, and the capture must be started before the first read().
void setup() {
  size(640, 480, P3D);
  background(0);
  // Load the OpenCV native (JNI) library before any OpenCV object is used.
  System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
  cap = new Capture(this, width, height);
  cap.start();
  cap.read();
  img = new CVImage(cap.width, cap.height);
  lastImg = createImage(cap.width, cap.height, ARGB);
  // Stop after 20 iterations or when the refinement moves less than 0.03 px.
  term = new TermCriteria(TermCriteria.COUNT | TermCriteria.EPS, 20, 0.03);
  subPixWinSize = new Size(10, 10); // half-window for cornerSubPix
  winSize = new Size(31, 31);       // search window for calcOpticalFlowPyrLK
 
  grey = new Mat(cap.height, cap.width, CvType.CV_8UC1);
  lastGrey = new Mat(grey.size(), grey.type());
  points = new MatOfPoint2f();
  lastPoints = new MatOfPoint2f();
 
  delta1 = 0.1;                     // small-motion threshold
  delta2 = min(width, height)/5.0;  // reject jumps larger than 1/5 of the frame
  init = true;                      // force feature detection on the first draw()
  strokeWeight(2);
  smooth();
}
 
void draw() {
  img.copy(cap, 0, 0, cap.width, cap.height, 0, 0, img.width, img.height);
  img.toCV();
  background(0);
  //  image(img, 0, 0);
 
  grey = img.getGrey();
  if (init) {
    grey.copyTo(lastGrey);
    MatOfPoint corners = new MatOfPoint();
    Imgproc.goodFeaturesToTrack(grey, corners, MAX_COUNT, 0.01, 10, new Mat(), 3, false, 0.04);
    points = new MatOfPoint2f(corners.toArray());
    lastPoints = new MatOfPoint2f(points.toArray());
    corners.release();
    init = false;
  }
 
  if (!points.empty() && !lastPoints.empty()) {
    Imgproc.cornerSubPix(grey, points, subPixWinSize, new Size(-1, -1), term);
    MatOfByte status = new MatOfByte();
    MatOfFloat err = new MatOfFloat();
    Video.calcOpticalFlowPyrLK(lastGrey, grey, lastPoints, points, status, err, 
      winSize, 3, term, Video.OPTFLOW_USE_INITIAL_FLOW, 0.001);
    Point [] pts1 = points.toArray();
    Point [] pts2 = lastPoints.toArray();
    for (int i=0; i delta2) || 
        (p1.y-p2.y > delta2)) 
        continue;
      int x = constrain((int)p1.x, 0, img.width-1);
      int y = constrain((int)p1.y, 0, img.height-1);
      color col = img.pixels[y*img.width+x];
      stroke(col);
      line((float)p2.x, (float)p2.y, (float)p1.x, (float)p1.y);
    }
  }
 
  lastPoints = new MatOfPoint2f(points.toArray());
  grey.copyTo(lastGrey);
  lastImg.copy(img, 0, 0, img.width, img.height, 0, 0, lastImg.width, lastImg.height);
 
  fill(0);
  noStroke();
  rect(0, 0, 110, 30);
  fill(255);
  text("Frame rate: " + nf(round(frameRate), 2), 10, 20, 0);
  stroke(255, 200, 0);
}
 
// Callback invoked by the video library whenever a fresh camera frame is
// available; reading it here keeps cap up to date for the next draw().
void captureEvent(Capture camera) {
  camera.read();
}
 
// Press the space bar to re-detect feature points on the next draw() pass.
// Uses the character value directly instead of the magic key code 32.
void keyPressed() {
  if (key == ' ') {
    init = true;
  }
}