Pixel Buffer Object in Processing

People switch from Processing to OpenFrameworks or other more 'serious' development platforms because of performance considerations. I did a few searches and found a number of libraries that expose different Java bindings of OpenCL, Vertex Buffer Objects, Pixel Buffer Objects, and even DirectShow. I wonder whether it is feasible to use Processing in a production environment where performance is important.

I did a test that displays a live webcam video stream in two ways: one with the traditional texture method and one with a pixel buffer object (PBO). On my MacBook Pro, the performance difference is noticeable and significant. I did not record screen videos of the sketches, as recording could distort the real-time performance.
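
The comparison is simply a matter of watching the frame rate of each sketch. The original sketches do not include any measurement code, but a minimal way to watch the rate (my own addition, using Processing's built-in frameRate variable) is to call something like this at the end of draw() in both versions:

// Hypothetical helper: print the measured frame rate roughly once per second.
void showFrameRate()
{
  if (frameCount % 30 == 0) {
    println("Frame rate: " + nf(frameRate, 0, 1) + " fps");
  }
}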

This is the ‘traditional’ method, where each captured frame is passed to texture() and Processing handles the upload to the GPU.

import processing.video.*;
import processing.opengl.*;
 
float a;
 
Capture cap;
PImage img;
 
void setup()
{
  println(Capture.list());
  size(640, 480, OPENGL);
  hint(ENABLE_OPENGL_2X_SMOOTH);
  hint(DISABLE_DEPTH_TEST);
  a = 0;
 
  img = loadImage("tron.jpg");
  frameRate(30);
  // Open the webcam at the sketch size, capturing at 30 fps.
  cap = new Capture(this, width, height, 30);
  cap.read();
  textureMode(NORMALIZED);
}
 
void draw()
{
  background(0);
  image(img, 0, 0);
  translate(width/2, height/2, 0);
  float b = a*PI/180.0;
  rotateY(b);
  rotateX(b);
  // Draw a quad textured with the live capture frame.
  beginShape(QUADS);
  texture(cap);
  vertex(-320, -240, 0, 0, 0);
  vertex( 320, -240, 0, 1, 0);
  vertex( 320, 240, 0, 1, 1);
  vertex(-320, 240, 0, 0, 1);
  endShape();
  a += 1;
  a %= 360;
}
 
void captureEvent(Capture _c)
{
  _c.read();
}

This is the PBO method, which streams each captured frame into an OpenGL texture through a pixel buffer object.

import processing.video.*;
import processing.opengl.*;
import javax.media.opengl.*;
import java.nio.IntBuffer;
 
float a;
PGraphicsOpenGL pgl;
GL gl;
PImage img;
 
int [] tex = new int[1];
int [] pbo = new int[1];
 
Capture cap;
 
void setup()
{
  println(Capture.list());
  size(640, 480, OPENGL);
  hint(ENABLE_OPENGL_2X_SMOOTH);
  hint(DISABLE_DEPTH_TEST);
  a = 0;
 
  img = loadImage("tron.jpg");
  frameRate(30);
  pgl = (PGraphicsOpenGL) g;
  cap = new Capture(this, width, height, 30);
  cap.read();
 
  gl = pgl.gl;
 
  // Create the pixel buffer object and allocate space for one RGBA frame.
  gl.glGenBuffers(1, pbo, 0);
  gl.glBindBuffer(GL.GL_PIXEL_UNPACK_BUFFER, pbo[0]);
  gl.glBufferData(GL.GL_PIXEL_UNPACK_BUFFER, 4*cap.width*cap.height, null, GL.GL_STREAM_DRAW);
  gl.glBindBuffer(GL.GL_PIXEL_UNPACK_BUFFER, 0);
 
  // Create the texture that will receive the webcam frames.
  gl.glGenTextures(1, tex, 0);
  gl.glBindTexture(GL.GL_TEXTURE_2D, tex[0]);
 
  gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MIN_FILTER, GL.GL_NEAREST);
  gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MAG_FILTER, GL.GL_NEAREST);
  gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_WRAP_S, GL.GL_CLAMP);
  gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_WRAP_T, GL.GL_CLAMP);
 
  // Allocate the texture storage; the pixel data is streamed in later from the PBO.
  gl.glTexImage2D(GL.GL_TEXTURE_2D, 0, GL.GL_RGBA, cap.width, cap.height, 0, GL.GL_BGRA, GL.GL_UNSIGNED_BYTE, null);
  gl.glBindTexture(GL.GL_TEXTURE_2D, 0);
}
 
void draw()
{
  background(0);
  image(img, 0, 0);
 
  // Switch to raw OpenGL calls for the PBO upload and the textured quad.
  gl = pgl.beginGL();
  gl.glColor3f(1.0f, 1.0f, 1.0f);
 
  gl.glEnable(GL.GL_TEXTURE_2D);
 
  gl.glBindTexture(GL.GL_TEXTURE_2D, tex[0]);
  gl.glBindBuffer(GL.GL_PIXEL_UNPACK_BUFFER, pbo[0]);

  // Copy whatever is currently in the PBO (the previous frame's pixels) into the texture.
  // With a PBO bound, the last argument is a byte offset into the buffer, not a pointer.
  gl.glTexSubImage2D(GL.GL_TEXTURE_2D, 0, 0, 0, cap.width, cap.height, GL.GL_BGRA, GL.GL_UNSIGNED_BYTE, 0);

  // Re-allocate (orphan) the buffer so mapping it does not wait for the transfer above.
  gl.glBufferData(GL.GL_PIXEL_UNPACK_BUFFER, 4*cap.width*cap.height, null, GL.GL_STREAM_DRAW);

  // Map the PBO into client memory and write the latest webcam pixels into it.
  IntBuffer tmp1 = gl.glMapBuffer(GL.GL_PIXEL_UNPACK_BUFFER, GL.GL_WRITE_ONLY).asIntBuffer();
  tmp1.put(cap.pixels);

  gl.glUnmapBuffer(GL.GL_PIXEL_UNPACK_BUFFER);
  gl.glBindBuffer(GL.GL_PIXEL_UNPACK_BUFFER, 0);
 
  gl.glTranslatef(width/2, height/2, 0);
  gl.glRotatef(a, 1, 1, 0);
 
  // Draw the textured quad in immediate mode.
  gl.glBegin(GL.GL_QUADS);
  gl.glTexCoord2f(0.0f, 0.0f);
  gl.glVertex3f(-320, -240, 0);
  gl.glTexCoord2f(1.0f, 0.0f);
  gl.glVertex3f( 320, -240, 0);
  gl.glTexCoord2f(1.0f, 1.0f);
  gl.glVertex3f( 320, 240, 0);
  gl.glTexCoord2f(0.0f, 1.0f);
  gl.glVertex3f(-320, 240, 0);
  gl.glEnd();
  gl.glBindTexture(GL.GL_TEXTURE_2D, 0);
  pgl.endGL();
  a += 1.0;
  a %= 360;
}
 
void captureEvent(Capture _c)
{
  _c.read();
}
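
The single-PBO version above still writes each frame's pixels and issues the texture copy inside the same draw() call. A common refinement is to use two PBOs and alternate between them, so that the texture copy reads from one buffer while the new webcam pixels are written into the other. I have not benchmarked this variant; the fragment below is only a sketch under the same Processing/JOGL setup as above, and the names pboIndex and nextIndex are my own.

// Two PBOs instead of one (replaces the single-element pbo array above).
int [] pbo = new int[2];
int pboIndex = 0;

// In setup(), allocate both buffers:
gl.glGenBuffers(2, pbo, 0);
for (int i = 0; i < 2; i++) {
  gl.glBindBuffer(GL.GL_PIXEL_UNPACK_BUFFER, pbo[i]);
  gl.glBufferData(GL.GL_PIXEL_UNPACK_BUFFER, 4*cap.width*cap.height, null, GL.GL_STREAM_DRAW);
}
gl.glBindBuffer(GL.GL_PIXEL_UNPACK_BUFFER, 0);

// In draw(), replace the single-PBO upload with:
int nextIndex = (pboIndex + 1) % 2;

// Copy the previous frame's pixels from one PBO into the texture.
gl.glBindTexture(GL.GL_TEXTURE_2D, tex[0]);
gl.glBindBuffer(GL.GL_PIXEL_UNPACK_BUFFER, pbo[pboIndex]);
gl.glTexSubImage2D(GL.GL_TEXTURE_2D, 0, 0, 0, cap.width, cap.height, GL.GL_BGRA, GL.GL_UNSIGNED_BYTE, 0);

// Meanwhile, write the current frame's pixels into the other PBO.
gl.glBindBuffer(GL.GL_PIXEL_UNPACK_BUFFER, pbo[nextIndex]);
gl.glBufferData(GL.GL_PIXEL_UNPACK_BUFFER, 4*cap.width*cap.height, null, GL.GL_STREAM_DRAW);
IntBuffer tmp = gl.glMapBuffer(GL.GL_PIXEL_UNPACK_BUFFER, GL.GL_WRITE_ONLY).asIntBuffer();
tmp.put(cap.pixels);
gl.glUnmapBuffer(GL.GL_PIXEL_UNPACK_BUFFER);
gl.glBindBuffer(GL.GL_PIXEL_UNPACK_BUFFER, 0);

// Swap the roles of the two buffers for the next frame.
pboIndex = nextIndex;

Whether the extra buffer actually helps depends on the driver; on some systems the buffer-orphaning trick in the single-PBO version already avoids most of the stall.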