The following example ports the original OpenCV motion template sample code from C to Java/Processing. The original source is the motempl.c file in the OpenCV distribution.
The program starts the default video capture device and passes each frame to the class Motion. Motion uses accumulated difference images to segment the frame into distinct motion regions, producing a list of rectangles that indicate where the motion components are. These results are returned to the Processing main program as an ArrayList of the class Result.
Main program
import processing.video.*; Capture cap; // The Motion class is the adoptation of the motion template // example from OpenCV. Motion mot; // The Result class is a temporary container for the position, // size, and angle of the motion component. ArrayList<Result> myRes; void setup() { size(640, 480); cap = new Capture(this, width, height); cap.start(); stroke(255, 0, 0); background(0); mot = new Motion(); mot.setup(width, height); } void draw() { if (cap.available()) { cap.read(); } else { return; } image(mot.update(cap), 0, 0); myRes = mot.getRes(); // To draw individual motion vector from the the list of results. for (int i=0; i<myRes.size(); i++) { Result rr = myRes.get(i); int cx = rr.x + rr.w/2; int cy = rr.y + rr.h/2; int r1 = min(rr.w, rr.h)/2; int r2 = min(10, r1/6); int x1 = (int)(cx+r1*cos(radians(rr.angle))); int y1 = (int)(cy+r1*sin(radians(rr.angle))); // Draw the arrows. line(x1, y1, cx, cy); line(x1, y1, x1+r2*cos(radians((180.0+rr.angle-20))), y1+r2*sin(radians((180.0+rr.angle-20)))); line(x1, y1, x1+r2*cos(radians((180.0+rr.angle+20))), y1+r2*sin(radians((180.0+rr.angle+20)))); } text("Frame Rate: " + round(frameRate), 500, 50); } |
Motion class
import java.util.*; import java.awt.image.DataBufferByte; import java.nio.*; import org.opencv.core.Core; import org.opencv.core.Mat; import org.opencv.core.MatOfRect; import org.opencv.core.CvType; import org.opencv.video.Video; import org.opencv.core.Rect; import org.opencv.core.Scalar; import org.opencv.imgproc.Imgproc; class Motion { int w; int h; final double mhi_duration = 1; final double max_time_delta = 0.5; final double min_time_delta = 0.05; final int N = 4; final int smallRect = 100; final float smallFactor = 0.05; Mat mhi; Mat orient; Mat mask; Mat segmask; Mat silh; Mat emp; Mat alp; ArrayList<Result> res; Vector<Mat> buf; double timestamp; long lastTime; int last = 0; byte [] bArray; int [] iArray; int pixCnt1; int pixCnt2; Motion() { // Load the external library for OpenCV. My version contains // libopencv_java248.dylib // opencv-248.jar // The rest of the OpenCV dynamic libraries are in /usr/local/lib System.loadLibrary(Core.NATIVE_LIBRARY_NAME); println(Core.VERSION); } void setup(int _w, int _h) { // To initialise all the variables and data structures. w = _w; h = _h; // The variables b1, t1, and bm are temporary containers to transfer // pixel information between PImage and OpenCV Mat. 
pixCnt1 = w*h*4; pixCnt2 = w*h; bArray = new byte[pixCnt1]; iArray = new int[pixCnt2]; buf = new Vector<Mat>(); for (int i=0; i<N; i++) { Mat m = new Mat(h, w, CvType.CV_8UC1, new Scalar(0)); buf.add(m); } silh = new Mat(h, w, CvType.CV_8UC1, new Scalar(0)); mhi = new Mat(h, w, CvType.CV_32FC1, new Scalar(0)); orient = new Mat(h, w, CvType.CV_32FC1, new Scalar(0)); segmask = new Mat(h, w, CvType.CV_32FC1, new Scalar(0)); mask = new Mat(h, w, CvType.CV_8UC1, new Scalar(0)); emp = new Mat(h, w, CvType.CV_8UC1, new Scalar(0)); alp = new Mat(h, w, CvType.CV_8UC1, new Scalar(255)); lastTime = System.currentTimeMillis(); } PImage update(PImage _m) { PImage img = createImage(w, h, ARGB); long elapsed = System.currentTimeMillis() - lastTime; timestamp = elapsed/1000.0; arrayCopy(_m.pixels, iArray); ByteBuffer bBuf = ByteBuffer.allocate(pixCnt1); IntBuffer iBuf = bBuf.asIntBuffer(); iBuf.put(iArray); bBuf.get(bArray); Mat m1 = new Mat(_m.height, _m.width, CvType.CV_8UC4); m1.put(0, 0, bArray); int idx2; int idx1 = last; Imgproc.cvtColor(m1, buf.elementAt(last), Imgproc.COLOR_BGRA2GRAY); idx2 = (last+1) % N; last = idx2; silh = buf.elementAt(idx2); Core.absdiff(buf.elementAt(idx1), buf.elementAt(idx2), silh); Imgproc.threshold(silh, silh, 30, 1, Imgproc.THRESH_BINARY); Video.updateMotionHistory(silh, mhi, timestamp, mhi_duration); mhi.convertTo(mask, CvType.CV_8UC1, 255.0/mhi_duration, (mhi_duration-timestamp)*255.0/mhi_duration); // The Mat dst is the output motion difference image. 
Mat dst = new Mat(h, w, CvType.CV_8UC4, new Scalar(0)); List<Mat> temp = new ArrayList<Mat>(); temp.add(alp); temp.add(emp); temp.add(emp); temp.add(mask); Core.merge(temp, dst); dst.get(0, 0, bArray); ByteBuffer.wrap(bArray).order(ByteOrder.BIG_ENDIAN).asIntBuffer().get(iArray); arrayCopy(iArray, img.pixels); img.updatePixels(); Video.calcMotionGradient(mhi, mask, orient, max_time_delta, min_time_delta, 3); MatOfRect targets = new MatOfRect(); Video.segmentMotion(mhi, segmask, targets, timestamp, max_time_delta); Rect [] rects = targets.toArray(); // The variable res is the list of rectangles of the motion components. res = new ArrayList<Result>(); for (int i=0; i<rects.length; i++) { Mat silhroi = new Mat(silh, rects[i]); Mat mhiroi = new Mat(mhi, rects[i]); Mat orientroi = new Mat(orient, rects[i]); Mat maskroi = new Mat(mask, rects[i]); double angle = Video.calcGlobalOrientation(orientroi, maskroi, mhiroi, timestamp, mhi_duration); angle = 360 - angle; double count = Core.norm(silhroi, Core.NORM_L1); int x = rects[i].x; int y = rects[i].y; int w = rects[i].width; int h = rects[i].height; if ((w+h) < smallRect) continue; if (count < (float(w)*float(h)*smallFactor)) continue; Result r = new Result(x, y, w, h, (float)angle); res.add(r); } return (img); } ArrayList<Result> getRes() { return res; } } |
Result class
class Result { // A temporary class to hold the motion component result. int x; int y; int w; int h; float angle; Result(int _x, int _y, int _w, int _h, float _a) { x = _x; y = _y; w = _w; h = _h; angle = _a; } } |