Merge pull request #8 from haozech/develop_chz
Compatible with WebGL 1.0; rewrite the sorting algorithm for ops; fix bugs
haozech committed May 15, 2020
2 parents 85844f0 + bd5a5a4 commit 926fc85
Showing 27 changed files with 1,341 additions and 1,232 deletions.
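A recurring pattern in this commit is a branch on this.version == 2: the existing WebGL 2 paths are kept, and WebGL 1 fallbacks are added beside them. As a rough sketch (the helper below is illustrative, not code from this diff), the version flag such branches rely on is typically derived when the rendering context is created:

    // Illustrative helper, not part of this commit: prefer a WebGL 2 context
    // and fall back to WebGL 1, remembering which version was obtained.
    function createGLContext(canvas) {
        let gl = canvas.getContext('webgl2');
        let version = 2;
        if (!gl) {
            gl = canvas.getContext('webgl');
            version = 1;
        }
        return {gl, version};
    }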
14 changes: 2 additions & 12 deletions src/feed/ImageFeed.es6
@@ -151,8 +151,6 @@ export default class imageFeed {
}
}
}
console.log('this is the end of reshapetorgb !!!');
console.dir(result);
return result;
};

@@ -164,7 +162,6 @@ export default class imageFeed {
* @return {Object} the resized dimensions
*/
reSize(image, params) {
console.log('execute resize!!');
// original image width and height
const width = this.pixelWidth;
const height = this.pixelHeight;
@@ -192,7 +189,6 @@ export default class imageFeed {
* scale the image according to scale, fit it to the target size, and center it
*/
resizeAndFitTargetSize(image, params){
console.log('execute resizeAndFitTargetSize!!');
// original image width and height
const width = this.pixelWidth;
const height = this.pixelHeight;
@@ -249,7 +245,6 @@ export default class imageFeed {
sh = Math.round(sw * this.pixelHeight / this.pixelWidth);
y = Math.floor((targetHeight - sh) / 2);
}
// console.log(x, y, sw, sh);
if (center) {
this.fromPixels2DContext.drawImage(
image, x, y, sw, sh);
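The hunk above scales the image to the target width while preserving the aspect ratio, then offsets it vertically so the drawn region ends up centered. A minimal sketch of that placement math (function and parameter names are illustrative, not taken from the source):

    // Scale to the target width, keep the aspect ratio, and center vertically;
    // a negative y means drawImage crops the top and bottom.
    function centerByWidth(srcW, srcH, targetW, targetH) {
        const sw = targetW;
        const sh = Math.round(sw * srcH / srcW);
        const x = 0;
        const y = Math.floor((targetH - sh) / 2);
        return {x, y, sw, sh}; // passed on to drawImage(image, x, y, sw, sh)
    }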
@@ -327,24 +322,18 @@ export default class imageFeed {
data = this.resizeAndFitTargetSize(pixels, opt);
data2 = this.fromPixels2DContext2.getImageData(0, 0, this.pixelWidth, this.pixelHeight);
}
else if (opt.scale) { // backward compatible: when scale is given, the short edge is scaled to scale

else if (opt.scale) { // resize directly to targetShape; the HumanSeg case
scaleSize = this.reSize(pixels, opt);
console.dir(scaleSize);
console.dir(pixels);
data = this.getImageData(opt, 0, 0, scaleSize);

data2 = this.fromPixels2DContext2.getImageData(0, 0, this.pixelWidth, this.pixelHeight);
}
else if (opt.targetSize) { // when targetSize is given, fit the image into the target width/height; the TinyYolo case

scaleSize = this.fitToTargetSize(pixels, opt);
data = this.getImageData(opt, 0, 0, scaleSize);
data2 = this.fromPixels2DContext2.getImageData(0, 0, this.pixelWidth, this.pixelHeight);
}
}


if (opt.gray) {
data = grayscale(data);
}
@@ -359,6 +348,7 @@ export default class imageFeed {
else if (opt.targetShape) {
data = this.allReshapeToRGB(data, opt, scaleSize);
}

return [{data: data, shape: opt.shape || opt.targetShape, name: 'image', canvas: data2}];
}
}
89 changes: 59 additions & 30 deletions src/gpu/gpu.es6
@@ -51,6 +51,10 @@ export default class gpu {
console.log('float extension is started or not? ' + !!this.textureFloat);
}
}

this.maxTextureSize = gl.getParameter(gl.MAX_TEXTURE_SIZE);
this.maxTextureImageUnits = gl.getParameter(gl.MAX_TEXTURE_IMAGE_UNITS);

// disable features that are not needed
gl.disable(gl.DEPTH_TEST);
gl.disable(gl.STENCIL_TEST);
@@ -67,14 +71,22 @@ export default class gpu {
this.waits = 0;

console.log('WebGL version is ' + this.version);
console.log('MAX_TEXTURE_SIZE is ' + gl.getParameter(gl.MAX_TEXTURE_SIZE));
console.log('MAX_TEXTURE_IMAGE_UNITS is ' + gl.getParameter(gl.MAX_TEXTURE_IMAGE_UNITS));
console.log('MAX_TEXTURE_SIZE is ' + this.maxTextureSize);
console.log('MAX_TEXTURE_IMAGE_UNITS is ' + this.maxTextureImageUnits);
}

getWebglVersion() {
return this.version;
}

getWebglMaxTextureSize() {
return this.maxTextureSize;
}

getWebglMaxTextureImageUnits() {
return this.maxTextureImageUnits;
}

initCache() {
// number of runs
this.times = 0;
@@ -145,7 +157,6 @@ export default class gpu {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);

gl.texImage2D(gl.TEXTURE_2D, // Target, matches bind above.
0, // Level of detail.
this.downloadInternalFormat, // Internal format.
@@ -346,6 +357,7 @@ export default class gpu {
} else {
// texture = gl.createTexture();
if (isRendered && (iLayer > 0 || (iLayer === 0 && item.tensor !== 'origin'))) {

const tData = this.cacheTextures['' + iLayer];
texture = tData[item.variable + '_' + item.tensor];
} else {
@@ -361,6 +373,7 @@ export default class gpu {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
if (this.version == 2){
gl.texImage2D(gl.TEXTURE_2D,
0,
this.internalFormat,
Expand All @@ -369,8 +382,27 @@ export default class gpu {
0,
this.textureFormat,
gl.FLOAT,
item.data,
0);
item.data);
}
else {
let oneSize = item.width_texture * item.height_texture;
let temp = new Float32Array(item.width_texture * item.height_texture * 4);
for (let i = 0; i < item.data.length; i++){
temp[i*4] = (item.data[i]);
temp[i*4+1] = 0;
temp[i*4+2] = 0;
temp[i*4+3] = 0;
}
gl.texImage2D(gl.TEXTURE_2D,
0,
gl.RGBA,
item.width_texture,
item.height_texture,
0,
gl.RGBA,
gl.FLOAT,
temp);
}
}
}
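The new else branch above is the WebGL 1 upload path. WebGL 1 has no single-channel float formats such as R32F, so with the OES_texture_float extension the data is uploaded as gl.RGBA / gl.FLOAT, one value per texel in the red channel and the remaining channels left at zero. A standalone sketch of that packing (assuming, as the diff suggests, that the shaders only read the red channel):

    // Minimal sketch of the WebGL 1 packing used above: one float per RGBA texel.
    function packForWebGL1(data, widthTexture, heightTexture) {
        const packed = new Float32Array(widthTexture * heightTexture * 4);
        for (let i = 0; i < data.length; i++) {
            packed[i * 4] = data[i]; // red channel carries the value; g/b/a stay 0
        }
        return packed; // uploaded via gl.texImage2D(..., gl.RGBA, ..., gl.FLOAT, packed)
    }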

@@ -389,7 +421,7 @@ export default class gpu {
// create the framebuffer texture
makeTexure(type, data, opts = {}) {
const gl = this.gl;
let index = this.textureBufferIndex % 2;
let index = int(mod(float(this.textureBufferIndex), 2.0));
let texture = this.textureBuffer[index];
gl.bindTexture(gl.TEXTURE_2D, texture);
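makeTexure binds one of two pre-created framebuffer textures, chosen by alternating textureBufferIndex between 0 and 1. This looks like the usual ping-pong arrangement in GPGPU pipelines, where the output texture of one pass becomes an input of the next; a generic sketch of the pattern (not the library's own class):

    // Generic ping-pong buffer: two textures alternate as the render target.
    class PingPongTextures {
        constructor(gl) {
            this.textures = [gl.createTexture(), gl.createTexture()];
            this.index = 0;
        }
        next() {
            this.index += 1;
            return this.textures[this.index % 2]; // alternate between the two, as above
        }
    }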

@@ -429,6 +461,7 @@ export default class gpu {
}

createPBO() {
if (this.version == 2){
const gl2 = this.gl;
const buffer = this.pbo;
gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, buffer);
@@ -437,42 +470,40 @@ export default class gpu {
gl2.readPixels(0, 0, this.width_texture_out, this.height_texture_out, gl2.RGBA, gl2.FLOAT, 0);
gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, null);
return buffer;
}
else {
let buffer = new Float32Array(this.width_texture_out * this.height_texture_out * 4);
const gl2 = this.gl;
gl2.readPixels(0, 0, this.width_texture_out, this.height_texture_out, gl2.RGBA, gl2.FLOAT, buffer);
return buffer;
}
}

downloadFoat32TensorFromBuffer(buffer) {
const gl2 = this.gl;
const size = 4 * this.width_texture_out * this.height_texture_out;
if (this.version == 2){
const pixels = new Float32Array(size);
gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, buffer);
gl2.getBufferSubData(gl2.PIXEL_PACK_BUFFER, 0, pixels);
gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, null);
// log.start('postprocess-readloop');
// let result = [];
// let offset = 0;
// for (let h = 0; h < this.height_texture_out; h++) {
// // record rows 1 and 2
// let temp1 = [];
// let temp2 = [];
// for (let w = 0; w < this.width_texture_out; w++) {
// temp1.push(pixels[offset]);
// temp1.push(pixels[offset + 1]);
// temp2.push(pixels[offset + 2]);
// temp2.push(pixels[offset + 3]);
// offset += 4;
// }
// result = result.concat(temp1);
// result = result.concat(temp2);
// }
let result = [];
for (let i = 0; i < this.width_texture_out * this.height_texture_out; i++) {
result.push(pixels[4 * i]);
}
// const result = Array.prototype.slice.call(pixels);
// console.dir(['result', result]);
// log.end('postprocess-readloop');
return result;
}
else {
let pixels = buffer;
let result = [];
for (let i = 0; i < this.width_texture_out * this.height_texture_out; i++) {
result.push(pixels[4 * i]);
}
return result;
}
}
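Both readback branches above end the same way: the computed tensor lives in the red channel of the RGBA output, so only every fourth float of the readback is kept. A small sketch of that shared unpacking step (the helper name is illustrative):

    // Keep the red component of each RGBA texel from a readPixels/PBO readback.
    function unpackRedChannel(pixels, widthTextureOut, heightTextureOut) {
        const result = [];
        for (let i = 0; i < widthTextureOut * heightTextureOut; i++) {
            result.push(pixels[4 * i]);
        }
        return result;
    }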


getWebglError(status) {
const gl2 = this.gl;
switch (status) {
@@ -497,7 +528,7 @@ export default class gpu {

createAndWaitForFence() {
const gl2 = this.gl;
const isFenceEnabled = (gl2.fenceSync !== null);
const isFenceEnabled = (gl2.fenceSync != null);
let isFencePassed = () => true;
if (isFenceEnabled) {
const sync = gl2.fenceSync(gl2.SYNC_GPU_COMMANDS_COMPLETE, 0);
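The switch from !== null to != null is what makes this work on WebGL 1: fenceSync only exists on WebGL2RenderingContext, so on a WebGL 1 context gl2.fenceSync is undefined. undefined !== null evaluates to true and would wrongly enable the fence path, while undefined != null evaluates to false and keeps the no-op isFencePassed. An equivalent, more explicit check (the helper name is illustrative):

    // True only when the context actually exposes fence sync (WebGL 2).
    function fenceSyncAvailable(gl) {
        return typeof gl.fenceSync === 'function';
    }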
@@ -531,10 +562,8 @@ export default class gpu {
let pixels = new Float32Array(this.width_texture_out * this.height_texture_out * 4);
// gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
const tt2 = +Date.now();
gl.readPixels(0, 0, this.width_texture_out, this.height_texture_out, gl.RGBA, gl.FLOAT, pixels, 0);
gl.readPixels(0, 0, this.width_texture_out, this.height_texture_out, gl.RGBA, gl.FLOAT, pixels);
// console.log('time spent reading data: ' + (+Date.now() - tt2) + ',' + (tt2 - tt));
// log.end('postprocess-readinside');
// log.start('postprocess-readloop');
let result = [];
for (let i = 0; i < this.width_texture_out * this.height_texture_out; i++) {
result.push(pixels[4 * i]);
