Skip to content

Commit

Permalink
Use WebGL for YCbCr to RGBA conversion where available; see phoboslab#1
Browse files Browse the repository at this point in the history
  • Loading branch information
phoboslab committed Nov 4, 2014
1 parent 72b3135 commit ad6abe9
Showing 1 changed file with 148 additions and 41 deletions.
189 changes: 148 additions & 41 deletions jsmpg.js
Original file line number Diff line number Diff line change
Expand Up @@ -33,15 +33,20 @@ var jsmpeg = window.jsmpeg = function( url, opts ) {
this.externalLoadCallback = opts.onload || null;
this.externalDecodeCallback = opts.ondecodeframe || null;
this.externalFinishedCallback = opts.onfinished || null;
this.bwFilter = opts.bwFilter || false;

this.customIntraQuantMatrix = new Uint8Array(64);
this.customNonIntraQuantMatrix = new Uint8Array(64);
this.blockData = new Int32Array(64);
this.zeroBlockData = new Int32Array(64);
this.fillArray(this.zeroBlockData, 0);

this.canvasContext = this.canvas.getContext('2d');

// use WebGL for YCbCrToRGBA conversion if possible (much faster)
if( !opts.forceCanvas2D && this.initWebGL() ) {
this.renderFrame = this.renderFrameGL;
} else {
this.canvasContext = this.canvas.getContext('2d');
this.renderFrame = this.renderFrame2D;
}

if( url instanceof WebSocket ) {
this.client = url;
Expand Down Expand Up @@ -271,7 +276,10 @@ jsmpeg.prototype.load = function( url ) {
that.loadCallback(request.response);
}
};
request.onprogress = this.updateLoader.bind(this);

if( !this.gl ) {
request.onprogress = this.updateLoader.bind(this);
}

request.open('GET', url);
request.responseType = "arraybuffer";
Expand Down Expand Up @@ -542,13 +550,10 @@ jsmpeg.prototype.initBuffers = function() {
this.canvas.width = this.width;
this.canvas.height = this.height;

this.currentRGBA = this.canvasContext.getImageData(0, 0, this.width, this.height);

if( this.bwFilter ) {
// This fails in IE10; don't use the bwFilter if you need to support it.
this.currentRGBA32 = new Uint32Array( this.currentRGBA.data.buffer );
if( !this.gl ) {
this.currentRGBA = this.canvasContext.getImageData(0, 0, this.width, this.height);
this.fillArray(this.currentRGBA.data, 255);
}
this.fillArray(this.currentRGBA.data, 255);
};


Expand Down Expand Up @@ -617,13 +622,7 @@ jsmpeg.prototype.decodePicture = function(skipOutput) {


if( skipOutput != DECODE_SKIP_OUTPUT ) {
if( this.bwFilter ) {
this.YToRGBA();
}
else {
this.YCbCrToRGBA();
}
this.canvasContext.putImageData(this.currentRGBA, 0, 0);
this.renderFrame();

if(this.externalDecodeCallback) {
this.externalDecodeCallback(this, this.canvas);
Expand Down Expand Up @@ -725,31 +724,113 @@ jsmpeg.prototype.YCbCrToRGBA = function() {
}
};

jsmpeg.prototype.YToRGBA = function() {
// Luma only
var pY = this.currentY;
var pRGBA = this.currentRGBA32;

var yIndex = 0;
var yNext2Lines = (this.codedWidth - this.width);
jsmpeg.prototype.renderFrame2D = function() {
	// Software path: convert the decoded YCbCr planes into the
	// preallocated RGBA ImageData, then blit it onto the canvas.
	this.YCbCrToRGBA();

	var ctx = this.canvasContext;
	ctx.putImageData(this.currentRGBA, 0, 0);
};

var rgbaIndex = 0;
var cols = this.width;
var rows = this.height;

var y;
// ----------------------------------------------------------------------------
// Accelerated WebGL YCbCrToRGBA conversion

// WebGL state: all null until initWebGL() succeeds. this.gl doubles as the
// "WebGL is active" flag tested elsewhere (e.g. the `!this.gl` guards in
// load() and initBuffers()).
jsmpeg.prototype.gl = null;
jsmpeg.prototype.program = null;
// One luminance texture per decoded plane: Y at full resolution,
// chroma planes at half resolution (see renderFrameGL).
jsmpeg.prototype.YTexture = null;
jsmpeg.prototype.UTexture = null;
jsmpeg.prototype.VTexture = null;

jsmpeg.prototype.createTexture = function(index, name) {
	// Create a texture configured for video planes (linear filtering,
	// edge clamping — required for non-power-of-two sizes) and wire it to
	// the sampler uniform `name` on texture unit `index`.
	var gl = this.gl;
	var texture = gl.createTexture();

	gl.bindTexture(gl.TEXTURE_2D, texture);

	var params = [
		[gl.TEXTURE_MAG_FILTER, gl.LINEAR],
		[gl.TEXTURE_MIN_FILTER, gl.LINEAR],
		[gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE],
		[gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE]
	];
	for( var i = 0; i < params.length; i++ ) {
		gl.texParameteri(gl.TEXTURE_2D, params[i][0], params[i][1]);
	}

	gl.uniform1i(gl.getUniformLocation(this.program, name), index);

	return texture;
};

for( var row = 0; row < rows; row++ ) {
for( var col = 0; col < cols; col++ ) {
y = pY[yIndex++];
pRGBA[rgbaIndex++] = 0xff000000 | y << 16 | y << 8 | y;
}
jsmpeg.prototype.compileShader = function(type, source) {
	// Compile a single GLSL shader of the given type (gl.VERTEX_SHADER or
	// gl.FRAGMENT_SHADER) from `source`. Throws with the driver's info log
	// on failure so broken shader source is surfaced immediately.
	// Fix: removed a stray leftover line (`yIndex += yNext2Lines;`) from the
	// deleted YToRGBA function that referenced undefined variables here.
	var gl = this.gl;
	var shader = gl.createShader(type);
	gl.shaderSource(shader, source);
	gl.compileShader(shader);

	if( !gl.getShaderParameter(shader, gl.COMPILE_STATUS) ) {
		throw new Error(gl.getShaderInfoLog(shader));
	}

	return shader;
};

jsmpeg.prototype.initWebGL = function() {
	// Set up a WebGL context, the YCbCr->RGBA conversion program and the
	// three plane textures. Returns true on success. On ANY failure it
	// clears this.gl and returns false, so the constructor falls back to
	// the Canvas2D renderer and the `!this.gl` guards elsewhere
	// (load(), initBuffers()) stay consistent.
	// Fix: the original threw on shader compile/link errors, which escaped
	// the constructor and prevented the intended Canvas2D fallback.

	// attempt to get a webgl context
	var gl;
	try {
		gl = this.gl = this.canvas.getContext('webgl') || this.canvas.getContext('experimental-webgl');
	} catch (e) {
		return false;
	}

	if (!gl) {
		return false;
	}

	try {
		// setup shaders
		this.program = gl.createProgram();
		gl.attachShader(this.program, this.compileShader(gl.VERTEX_SHADER, YCBCRTORGBA_VERTEX_SHADER));
		gl.attachShader(this.program, this.compileShader(gl.FRAGMENT_SHADER, YCBCRTORGBA_FRAGMENT_SHADER));
		gl.linkProgram(this.program);

		if( !gl.getProgramParameter(this.program, gl.LINK_STATUS) ) {
			throw new Error(gl.getProgramInfoLog(this.program));
		}

		// createTexture() sets sampler uniforms, so the program must be
		// in use before the textures are created
		gl.useProgram(this.program);

		// setup textures — one luminance texture per plane
		this.YTexture = this.createTexture(0, 'YTexture');
		this.UTexture = this.createTexture(1, 'UTexture');
		this.VTexture = this.createTexture(2, 'VTexture');

		// init buffers: a single static quad covering the canvas; the
		// vertex shader derives texture coordinates from it
		this.buffer = gl.createBuffer();
		gl.bindBuffer(gl.ARRAY_BUFFER, this.buffer);
		gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([0, 0, 0, 1, 1, 0, 1, 1]), gl.STATIC_DRAW);

		var vertexAttr = gl.getAttribLocation(this.program, 'vertex');
		gl.enableVertexAttribArray(vertexAttr);
		gl.vertexAttribPointer(vertexAttr, 2, gl.FLOAT, false, 0, 0);
	} catch (e) {
		// Shader/program setup failed — disable WebGL entirely so the
		// caller uses the Canvas2D path instead.
		this.gl = null;
		return false;
	}

	return true;
};

jsmpeg.prototype.renderFrameGL = function() {
	// Upload the three decoded planes as luminance textures and draw the
	// full-canvas quad; the fragment shader performs the YCbCr -> RGBA
	// conversion on the GPU.
	var gl = this.gl;

	// WebGL doesn't like Uint8ClampedArrays, so view each plane's backing
	// buffer through a plain Uint8Array. Note the Cr plane feeds UTexture
	// and the Cb plane feeds VTexture — consistent with the shader math.
	var planes = [
		[gl.TEXTURE0, this.YTexture, this.width, this.height, new Uint8Array(this.currentY.buffer)],
		[gl.TEXTURE1, this.UTexture, this.halfWidth, this.halfHeight, new Uint8Array(this.currentCr.buffer)],
		[gl.TEXTURE2, this.VTexture, this.halfWidth, this.halfHeight, new Uint8Array(this.currentCb.buffer)]
	];

	for( var i = 0; i < planes.length; i++ ) {
		var p = planes[i];
		gl.activeTexture(p[0]);
		gl.bindTexture(gl.TEXTURE_2D, p[1]);
		gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, p[2], p[3], 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, p[4]);
	}

	gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
};


// ----------------------------------------------------------------------------
Expand Down Expand Up @@ -1054,12 +1135,6 @@ jsmpeg.prototype.copyMacroblock = function(motionH, motionV, sY, sCr, sCb ) {
}
}
}

if( this.bwFilter ) {
// No need to copy chrominance when black&white filter is active
return;
}


// Chrominance

Expand Down Expand Up @@ -2210,7 +2285,39 @@ var
START_SLICE_LAST = 0xAF,
START_PICTURE = 0x00,
START_EXTENSION = 0xB5,
START_USER_DATA = 0xB2;
START_USER_DATA = 0xB2,

// Shaders for accelerated WebGL YCbCrToRGBA conversion
// Fragment shader: samples the three luminance textures and converts each
// pixel from YCbCr to RGB.
// NOTE(review): renderFrameGL uploads the Cr plane to UTexture and the Cb
// plane to VTexture, so the variables below named `cr`/`cb` do match the
// data despite the sampler names — only the U/V naming is swapped; the
// conversion math itself is consistent. Verify before renaming.
YCBCRTORGBA_FRAGMENT_SHADER = [
'precision highp float;',
'uniform sampler2D YTexture;',
'uniform sampler2D UTexture;',
'uniform sampler2D VTexture;',
'varying vec2 texCoord;',

'void main() {',
'float y = texture2D(YTexture, texCoord).r;',
'float cr = texture2D(UTexture, texCoord).r - 0.5;',
'float cb = texture2D(VTexture, texCoord).r - 0.5;',

'gl_FragColor = vec4(',
'y + 1.4 * cr,',
'y + -0.343 * cb - 0.711 * cr,',
'y + 1.765 * cb,',
'1.0',
');',
'}'
].join('\n'),

// Vertex shader: passes the unit-quad vertex through as the texture
// coordinate and maps it to clip space (with a Y flip so the image is
// not rendered upside down).
YCBCRTORGBA_VERTEX_SHADER = [
'attribute vec2 vertex;',
'varying vec2 texCoord;',

'void main() {',
'texCoord = vertex;',
'gl_Position = vec4((vertex * 2.0 - 1.0) * vec2(1, -1), 0.0, 1.0);',
'}'
].join('\n');

var MACROBLOCK_TYPE_TABLES = [
null,
Expand Down

0 comments on commit ad6abe9

Please sign in to comment.