Added toys that enable visual debugging of Aard
This commit is contained in:
parent 7c1d3e52e5
commit e198c69741
@@ -10,6 +10,7 @@ import { Corner } from './enums/corner.enum';
import { VideoPlaybackState } from './enums/video-playback-state.enum';
import { FallbackCanvas } from './gl/FallbackCanvas';
import { GlCanvas } from './gl/GlCanvas';
import { GlDebugCanvas, GlDebugType } from './gl/GlDebugCanvas';
import { AardCanvasStore } from './interfaces/aard-canvas-store.interface';
import { AardDetectionSample, generateSampleArray, resetSamples } from './interfaces/aard-detection-sample.interface';
import { AardStatus, initAardStatus } from './interfaces/aard-status.interface';
@@ -251,6 +252,7 @@ export class Aard {
private testResults: AardTestResults;
private canvasSamples: AardDetectionSample;

private forceFullRecheck: boolean = true;
//#endregion
@@ -311,6 +313,14 @@ export class Aard {
),
};

try {
this.showDebugCanvas();
this.canvasStore.main.showCanvas();
} catch (e) {
console.error('FAILED TO CREATE DEBUG CANVAS', e);
}

this.startCheck();
}
@@ -349,6 +359,19 @@ export class Aard {
throw 'AARD_INVALID_SETTINGS';
}
}

/**
* Creates and shows the debug canvas
*/
private showDebugCanvas() {
console.log('SHOWING DEBUG CANVAS!');
if (!this.canvasStore.debug) {
this.canvasStore.debug = new GlDebugCanvas({...this.settings.active.arDetect.canvasDimensions.sampleCanvas, id: 'uw-debug-gl'});
}
this.canvasStore.debug.show();
this.canvasStore.debug.drawVideoFrame(this.canvasStore.main.canvas);
}
//#endregion
/**
@@ -453,11 +476,13 @@ export class Aard {
*/
private async main() {
try {
let imageData: Uint8Array;

// We abuse a do-while loop to eat our cake (get early returns)
// and have it, too (if we return early, we still execute code
// at the end of this function)
do {
const imageData = await new Promise<Uint8Array>(
imageData = await new Promise<Uint8Array>(
resolve => {
try {
this.canvasStore.main.drawVideoFrame(this.video);
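The comment above refers to the single-pass do { ... } while (false) idiom: a break stands in for an early return, and the code after the loop still runs. A minimal sketch of the pattern outside of Aard (all function names here are hypothetical, purely for illustration):

    function checkOnce() {
      let verdict = 'ok';
      do {
        if (!frameAvailable()) {   // hypothetical guard
          verdict = 'skipped';
          break;                   // the "early return" out of the happy path
        }
        runDetection();            // hypothetical work
      } while (false);             // loop body executes exactly once
      publishResult(verdict);      // always runs, even after an early break
    }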
@@ -571,7 +596,9 @@ export class Aard {
// (as aspect ratio may have been set manually while autodetection was off)
if (this.testResults.notLetterbox) {
// console.log('————not letterbox')
console.warn('DETECTED NOT LETTERBOX! (resetting)')
this.updateAspectRatio(this.defaultAr);
return;
}

// if detection is uncertain, we don't do anything at all (unless the guard line was broken, in which case we reset)
@@ -579,9 +606,8 @@ export class Aard {
// console.info('aspect ratio not certain:', this.testResults.aspectRatioUncertainReason);
// console.warn('check finished:', JSON.parse(JSON.stringify(this.testResults)), JSON.parse(JSON.stringify(this.canvasSamples)), '\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n');

if (this.testResults.guardLine.invalidated) {
this.updateAspectRatio(this.defaultAr);
}
console.warn('ASPECT RATIO UNCERTAIN, GUARD LINE INVALIDATED (resetting)')
this.updateAspectRatio(this.defaultAr);

return;
}
@@ -589,11 +615,11 @@ export class Aard {
// TODO: emit debug values if debugging is enabled
this.testResults.isFinished = true;

// console.warn(
// `[${(+new Date() % 10000) / 100} | ${this.arid}]`,'check finished — aspect ratio updated:', this.testResults.aspectRatioUpdated,
// '\ndetected ar:', this.testResults.activeAspectRatio, '->', this.getAr(),
// '\nis video playing?', this.getVideoPlaybackState() === VideoPlaybackState.Playing,
// '\n\n', JSON.parse(JSON.stringify(this.testResults)), JSON.parse(JSON.stringify(this.canvasSamples)), '\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n');
console.warn(
`[${(+new Date() % 10000) / 100} | ${this.arid}]`,'check finished — aspect ratio updated:', this.testResults.aspectRatioUpdated,
'\ndetected ar:', this.testResults.activeAspectRatio, '->', this.getAr(),
'\nis video playing?', this.getVideoPlaybackState() === VideoPlaybackState.Playing,
'\n\n', JSON.parse(JSON.stringify(this.testResults)), JSON.parse(JSON.stringify(this.canvasSamples)), '\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n');

// if edge width changed, emit update event.
// except aspectRatioUpdated doesn't get set reliably, so we just call update every time, and update
@@ -603,6 +629,10 @@ export class Aard {
// }

// if we got "no letterbox" OR aspectRatioUpdated

if (this.canvasStore.debug) {
this.canvasStore.debug.drawBuffer(imageData);
}
} catch (e) {
console.warn('[Ultrawidify] Aspect ratio autodetection crashed for some reason.\n\nsome reason:', e);
this.videoData.resizer.setAr({type: AspectRatioType.AutomaticUpdate, ratio: this.defaultAr});
@@ -682,11 +712,13 @@ export class Aard {
pixelValues[pvi++] = imageData[px_r];
pixelValues[pvi++] = imageData[px_r + 1];
pixelValues[pvi++] = imageData[px_r + 2];
imageData[px_r + 3] = GlDebugType.BlackLevelSample;

const endpx_r = px_r + (width * 4) - (i * 8) - 4; // -4 because 4 bytes per pixel, and - twice the offset to mirror the diagonal
pixelValues[pvi++] = imageData[endpx_r];
pixelValues[pvi++] = imageData[endpx_r + 1];
pixelValues[pvi++] = imageData[endpx_r + 2];
imageData[endpx_r + 3] = GlDebugType.BlackLevelSample;
}

// now let's populate the bottom two corners
@@ -697,11 +729,13 @@ export class Aard {
pixelValues[pvi++] = imageData[px_r];
pixelValues[pvi++] = imageData[px_r + 1];
pixelValues[pvi++] = imageData[px_r + 2];
imageData[px_r + 3] = GlDebugType.BlackLevelSample;

const endpx_r = px_r + (width * 4) - (i * 8) - 4; // -4 because 4 bytes per pixel, and - twice the offset to mirror the diagonal
pixelValues[pvi++] = imageData[endpx_r];
pixelValues[pvi++] = imageData[endpx_r + 1];
pixelValues[pvi++] = imageData[endpx_r + 2];
imageData[endpx_r + 3] = GlDebugType.BlackLevelSample;
}

let min = 255;
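The "-4 because 4 bytes per pixel" mirroring in the two hunks above works out as follows, assuming px_r is the byte offset of the i-th sample counted from the left edge of its row (rowStart + i * 4):

    // endpx_r = px_r + (width * 4) - (i * 8) - 4
    //         = rowStart + (i * 4) + (width * 4) - (i * 8) - 4
    //         = rowStart + (width - i - 1) * 4
    // i.e. the i-th pixel counted from the right edge of the same row,
    // mirroring the left-side sample across the middle of the row.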
@@ -796,7 +830,10 @@ export class Aard {
|| imageData[i + 1] > this.testResults.blackThreshold
|| imageData[i + 2] > this.testResults.blackThreshold
) {
imageData[i + 3] = GlDebugType.GuardLineCornerViolation;
this.testResults.guardLine.cornerPixelsViolated[Corner.TopLeft]++;
} else {
imageData[i + 3] = GlDebugType.GuardLineCornerOk;
}
i += 4;
}
@@ -806,12 +843,15 @@ export class Aard {
|| imageData[i + 1] > this.testResults.blackThreshold
|| imageData[i + 2] > this.testResults.blackThreshold
) {
imageData[i + 3] = GlDebugType.GuardLineViolation;
// DON'T FORGET TO INVALIDATE GUARD LINE
this.testResults.guardLine.top = -1;
this.testResults.guardLine.bottom = -1;
this.testResults.guardLine.invalidated = true;
return;
};
} else {
imageData[i + 3] = GlDebugType.GuardLineOk;
}
i += 4;
}
while (i < rowEnd) {
@@ -820,7 +860,10 @@ export class Aard {
|| imageData[i + 1] > this.testResults.blackThreshold
|| imageData[i + 2] > this.testResults.blackThreshold
) {
imageData[i + 3] = GlDebugType.GuardLineCornerViolation;
this.testResults.guardLine.cornerPixelsViolated[Corner.TopRight]++;
} else {
imageData[i + 3] = GlDebugType.GuardLineCornerOk;
}
i += 4; // skip over alpha channel
}
@@ -842,7 +885,10 @@ export class Aard {
|| imageData[i + 1] > this.testResults.blackThreshold
|| imageData[i + 2] > this.testResults.blackThreshold
) {
imageData[i + 3] = GlDebugType.GuardLineCornerViolation;
this.testResults.guardLine.cornerPixelsViolated[Corner.BottomLeft]++;
} else {
imageData[i + 3] = GlDebugType.GuardLineCornerOk;
}
i += 4; // skip over alpha channel
}
@@ -855,12 +901,15 @@ export class Aard {
|| imageData[i + 1] > this.testResults.blackThreshold
|| imageData[i + 2] > this.testResults.blackThreshold
) {
imageData[i + 3] = GlDebugType.GuardLineViolation;
// DON'T FORGET TO INVALIDATE GUARD LINE
this.testResults.guardLine.top = -1;
this.testResults.guardLine.bottom = -1;
this.testResults.guardLine.invalidated = true;
return;
};
} else {
imageData[i + 3] = GlDebugType.GuardLineOk;
}
i += 4;
}
if (i % 4) {
@@ -872,7 +921,10 @@ export class Aard {
|| imageData[i + 1] > this.testResults.blackThreshold
|| imageData[i + 2] > this.testResults.blackThreshold
) {
imageData[i + 3] = GlDebugType.GuardLineCornerViolation;
this.testResults.guardLine.cornerPixelsViolated[Corner.BottomRight]++;
} else {
imageData[i + 3] = GlDebugType.GuardLineCornerOk;
}
i += 4; // skip over alpha channel
}
@@ -953,8 +1005,11 @@ export class Aard {
imagePixel ||= imageData[i++] > this.testResults.blackThreshold;

if (imagePixel && ++pixelCount > detectionThreshold) {
imageData[i] = GlDebugType.ImageLineThresholdReached;
return;
};
} else {
imageData[i] = imagePixel ? GlDebugType.ImageLineOk : GlDebugType.ImageLineFail;
}
i++; // skip over alpha channel
}
}
@@ -965,8 +1020,11 @@ export class Aard {
imagePixel ||= imageData[i++] > this.testResults.blackThreshold;

if (imagePixel && ++pixelCount > detectionThreshold) {
imageData[i] = GlDebugType.ImageLineThresholdReached;
return;
};
} else {
imageData[i] = imagePixel ? GlDebugType.ImageLineOk : GlDebugType.ImageLineFail;
}
i++; // skip over alpha channel
}
if (! this.testResults.guardLine.cornerViolated[Corner.TopRight]) {
@@ -977,8 +1035,11 @@ export class Aard {
imagePixel ||= imageData[i++] > this.testResults.blackThreshold;

if (imagePixel && ++pixelCount > detectionThreshold) {
imageData[i] = GlDebugType.ImageLineThresholdReached;
return;
};
} else {
imageData[i] = imagePixel ? GlDebugType.ImageLineOk : GlDebugType.ImageLineFail;
}
i++; // skip over alpha channel
}
}
@@ -993,8 +1054,11 @@ export class Aard {
imagePixel ||= imageData[i++] > this.testResults.blackThreshold;

if (imagePixel && ++pixelCount > detectionThreshold) {
imageData[i] = GlDebugType.ImageLineThresholdReached;
return;
};
} else {
imageData[i] = imagePixel ? GlDebugType.ImageLineOk : GlDebugType.ImageLineFail;
}
i++; // skip over alpha channel
}
}
@@ -1017,8 +1081,11 @@ export class Aard {
imagePixel ||= imageData[i++] > this.testResults.blackThreshold;

if (imagePixel && ++pixelCount > detectionThreshold) {
imageData[i] = GlDebugType.ImageLineThresholdReached;
return;
};
} else {
imageData[i] = imagePixel ? GlDebugType.ImageLineOk : GlDebugType.ImageLineFail;
}
i++; // skip over alpha channel
}
}
@@ -1043,8 +1110,11 @@ export class Aard {
imagePixel ||= imageData[i++] > this.testResults.blackThreshold;

if (imagePixel && ++pixelCount > detectionThreshold) {
imageData[i] = GlDebugType.ImageLineThresholdReached;
return;
};
} else {
imageData[i] = imagePixel ? GlDebugType.ImageLineOk : GlDebugType.ImageLineFail;
}
i++; // skip over alpha channel
}
}
@@ -1055,8 +1125,11 @@ export class Aard {
imagePixel ||= imageData[i++] > this.testResults.blackThreshold;

if (imagePixel && ++pixelCount > detectionThreshold) {
imageData[i] = GlDebugType.ImageLineThresholdReached;
return;
};
} else {
imageData[i] = imagePixel ? GlDebugType.ImageLineOk : GlDebugType.ImageLineFail;
}
i++; // skip over alpha channel
}
if (! this.testResults.guardLine.cornerViolated[Corner.TopRight]) {
@@ -1067,8 +1140,11 @@ export class Aard {
imagePixel ||= imageData[i++] > this.testResults.blackThreshold;

if (imagePixel && ++pixelCount > detectionThreshold) {
imageData[i] = GlDebugType.ImageLineThresholdReached;
return;
};
} else {
imageData[i] = imagePixel ? GlDebugType.ImageLineOk : GlDebugType.ImageLineFail;
}
i++; // skip over alpha channel
}
}
@@ -1083,8 +1159,11 @@ export class Aard {
imagePixel ||= imageData[i++] > this.testResults.blackThreshold;

if (imagePixel && ++pixelCount > detectionThreshold) {
imageData[i] = GlDebugType.ImageLineThresholdReached;
return;
};
} else {
imageData[i] = imagePixel ? GlDebugType.ImageLineOk : GlDebugType.ImageLineFail;
}
i++; // skip over alpha channel
}
}
@@ -1095,8 +1174,11 @@ export class Aard {
imagePixel ||= imageData[i++] > this.testResults.blackThreshold;

if (imagePixel && ++pixelCount > detectionThreshold) {
imageData[i] = GlDebugType.ImageLineThresholdReached;
return;
};
} else {
imageData[i] = imagePixel ? GlDebugType.ImageLineOk : GlDebugType.ImageLineFail;
}
i++; // skip over alpha channel
}
if (! this.testResults.guardLine.cornerViolated[Corner.TopRight]) {
@@ -1107,8 +1189,11 @@ export class Aard {
imagePixel ||= imageData[i++] > this.testResults.blackThreshold;

if (imagePixel && ++pixelCount > detectionThreshold) {
imageData[i] = GlDebugType.ImageLineThresholdReached;
return;
};
} else {
imageData[i] = imagePixel ? GlDebugType.ImageLineOk : GlDebugType.ImageLineFail;
}
i++; // skip over alpha channel
}
}
@@ -1222,11 +1307,13 @@ export class Aard {
|| imageData[rowOffset + x + 2] > this.testResults.blackLevel;

if (!isImage) {
imageData[rowOffset + x + 3] = GlDebugType.EdgeScanProbe;
// TODO: maybe some day mark this pixel as checked by writing to alpha channel
i++;
continue;
}
if (this.canvasSamples.top[i] === -1) {
imageData[rowOffset + x + 3] = GlDebugType.EdgeScanHit;
this.canvasSamples.top[i] = row;
finishedRows++;
}
@@ -1270,12 +1357,14 @@ export class Aard {
|| imageData[rowOffset + x + 2] > this.testResults.blackLevel;

if (!isImage) {
imageData[rowOffset + x + 3] = GlDebugType.EdgeScanProbe;
// console.log('(row:', row, ')', 'val:', imageData[rowOffset + x], 'col', x >> 2, x, 'pxoffset:', rowOffset + x, 'len:', imageData.length)
// TODO: maybe some day mark this pixel as checked by writing to alpha channel
i++;
continue;
}
if (this.canvasSamples.bottom[i] === -1) {
imageData[rowOffset + x + 3] = GlDebugType.EdgeScanHit;
this.canvasSamples.bottom[i] = row;
finishedRows++;
}
@@ -1326,8 +1415,11 @@ export class Aard {
|| imageData[xs + 1] > this.testResults.blackThreshold
|| imageData[xs + 2] > this.testResults.blackThreshold
) {
imageData[xs + 3] = GlDebugType.SlopeTestDarkViolation;
this.canvasSamples.top[i + 1] = -1;
break;
} else {
imageData[xs + 3] = GlDebugType.SlopeTestDarkOk;
}
xs += 4;
}
@@ -1339,7 +1431,7 @@ export class Aard {
while (i < this.canvasSamples.bottom.length) {
// calculate row offset:
i1 = i + 1;
row = (this.canvasSamples.bottom[i1] - 1) * width * 4;
row = (this.canvasSamples.bottom[i1] + 1) * width * 4;
xs = row + this.canvasSamples.bottom[i] - slopeTestSample;
xe = row + this.canvasSamples.bottom[i] + slopeTestSample;

@@ -1349,15 +1441,17 @@ export class Aard {
|| imageData[xs + 1] > this.testResults.blackThreshold
|| imageData[xs + 2] > this.testResults.blackThreshold
) {
imageData[xs + 3] = GlDebugType.SlopeTestDarkViolation;
this.canvasSamples.bottom[i1] = -1;
i += 2;
break;
}
imageData[xs + 3] = GlDebugType.SlopeTestDarkOk;
xs += 4;
}

if (this.canvasSamples.bottom[i1]) {
this.canvasSamples.bottom[i1] = height - this.canvasSamples.bottom[i1];
this.canvasSamples.bottom[i1] = this.canvasSamples.bottom[i1];
}

i += 2;
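Taken together, the Aard changes above follow one pattern: every pixel the detector inspects gets a GlDebugType value written into its alpha byte, and the same RGBA buffer is later handed to the debug canvas, whose shader colors each pixel by that alpha index. A condensed sketch of the round trip (the offset math and the dark test here are illustrative, not copied verbatim from the diff):

    // while checking: classify a sample and record the verdict in its alpha byte
    const px = (row * width + col) * 4;   // RGBA layout, 4 bytes per pixel
    const dark =
      imageData[px] <= this.testResults.blackThreshold
      && imageData[px + 1] <= this.testResults.blackThreshold
      && imageData[px + 2] <= this.testResults.blackThreshold;
    imageData[px + 3] = dark ? GlDebugType.GuardLineOk : GlDebugType.GuardLineViolation;

    // after the check: push the annotated buffer to the debug canvas
    if (this.canvasStore.debug) {
      this.canvasStore.debug.drawBuffer(imageData);
    }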
@@ -65,7 +65,7 @@ export class GlCanvas {
private set gl(x: WebGLRenderingContext) {
this._context = x;
};
private get gl(): WebGLRenderingContext {
protected get gl(): WebGLRenderingContext {
return this._context;
}
@@ -80,7 +80,7 @@ export class GlCanvas {

private buffers: GlCanvasBuffers;
private texture: WebGLTexture;
private programInfo: GlCanvasProgramInfo;
protected programInfo: GlCanvasProgramInfo;
private projectionMatrix: mat4;

get width() {
@@ -103,7 +103,7 @@ export class GlCanvas {
* Draws video frame to the GL canvas
* @param video video to extract a frame from
*/
drawVideoFrame(video: HTMLVideoElement): void {
drawVideoFrame(video: HTMLVideoElement | HTMLCanvasElement): void {
this.updateTexture(video);
this.drawScene();
}
@@ -156,7 +156,16 @@ export class GlCanvas {
);

if (!this.gl) {
throw new Error('WebGL not supported');
try {
this.gl = this.canvas.getContext(
"webgl",
{
preserveDrawingBuffer: true
}
);
} catch (e) {
throw new Error('WebGL not supported');
}
}
if(options.id) {
this.canvas.setAttribute('id', options.id);
@@ -212,24 +221,30 @@ export class GlCanvas {
this.frameBuffer = new Uint8Array(this.frameBufferSize);
}

private loadShader(type, source) {
protected loadShader(type, source) {
const shader = this.gl.createShader(type);
this.gl.shaderSource(shader, source);
this.gl.compileShader(shader);

// TODO: warn if shader failed to compile
if (!this.gl.getShaderParameter(shader, this.gl.COMPILE_STATUS)) {
this.gl.deleteShader(shader);
console.warn('DEBUG: Shader Compilation Error: ', type, this.gl.getShaderInfoLog(shader), '(cheat sheet: vertex shaders:', this.gl.VERTEX_SHADER, ')');
return null;
}

return shader;
}

private initShaderProgram() {
protected loadShaders() {
const vertexShader = this.loadShader(this.gl.VERTEX_SHADER, vsSource);
const fragmentShader = this.loadShader(this.gl.FRAGMENT_SHADER, fsSource);

return {vertexShader, fragmentShader};
}

private initShaderProgram() {
const {vertexShader, fragmentShader} = this.loadShaders();

// Create the shader program
const shaderProgram = this.gl.createProgram();
this.gl.attachShader(shaderProgram, vertexShader);
@@ -238,6 +253,7 @@ export class GlCanvas {

// TODO: maybe give a warning if program failed to initialize
if (!this.gl.getProgramParameter(shaderProgram, this.gl.LINK_STATUS)) {
console.warn('DEBUG — FAILED TO LINK SHADER PROGRAM', this.gl.getProgramInfoLog(shaderProgram))
return null;
}
@@ -279,7 +295,7 @@ export class GlCanvas {
this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_MIN_FILTER, this.gl.LINEAR);
}

private updateTexture(video: HTMLVideoElement) {
protected updateTexture(video: HTMLVideoElement | HTMLCanvasElement | null) {
const level = 0;
const internalFormat = this.gl.RGBA;
const srcFormat = this.gl.RGBA;
@@ -333,7 +349,7 @@ export class GlCanvas {
this.gl.enableVertexAttribArray(this.programInfo.attribLocations.vertexPosition);
}

private drawScene(): void {
protected drawScene(): void {
/**
* Since we are drawing our frames in a way such that the entire canvas is
* always covered by rendered video, and given our video is the only object
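The private to protected changes in GlCanvas above exist so a subclass can reuse the GL plumbing while swapping in its own shaders, which is exactly what the new GlDebugCanvas below does. A minimal sketch of the extension point (the class name and shader source strings here are hypothetical):

    class MyCanvas extends GlCanvas {
      protected loadShaders() {
        // same compilation path as the base class, different fragment source
        const vertexShader = this.loadShader(this.gl.VERTEX_SHADER, myVertexSource);       // hypothetical source string
        const fragmentShader = this.loadShader(this.gl.FRAGMENT_SHADER, myFragmentSource); // hypothetical source string
        return {vertexShader, fragmentShader};
      }
    }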
src/ext/lib/aard/gl/GlDebugCanvas.ts (new file, 158 lines)
@@ -0,0 +1,158 @@
import { GlCanvas, GlCanvasOptions } from './GlCanvas';


const vsSource = `
attribute vec4 aVertexPosition;
attribute vec3 aVertexNormal;
attribute vec2 aTextureCoord;

uniform mat4 uNormalMatrix;
uniform mat4 uModelViewMatrix;
uniform mat4 uProjectionMatrix;

varying highp vec2 vTextureCoord;

void main(void) {
gl_Position = uProjectionMatrix * aVertexPosition;
vTextureCoord = vec2(aTextureCoord.x, 1.0 - aTextureCoord.y);
// vTextureCoord = aTextureCoord;
}
`;
const fSource = `
precision mediump float;

uniform sampler2D u_texture;
// uniform sampler1D u_colorTexture; // Array of replacement colors
uniform vec3 u_colors[16];
varying vec2 vTextureCoord;

void main() {
vec4 color = texture2D(u_texture, vTextureCoord);
int alphaIndex = int(color.a * 255.0);

if (alphaIndex == 255) { // convert to grayscale on normal alpha
float gray = dot(color.rgb, vec3(0.299, 0.587, 0.114));
gl_FragColor = vec4(vec3(gray), 1.0);
} else if (alphaIndex < 16) { // use custom color where possible
// no 1D textures in webgl, only webgl2 maybe
// vec3 selectedColor = texture1D(u_colorTexture, float(alphaIndex) / 15.0).rgb;
// gl_FragColor = vec4(selectedColor, 1.0);

vec3 selectedColor;
if (alphaIndex == 0) selectedColor = u_colors[0];
else if (alphaIndex == 1) selectedColor = u_colors[1];
else if (alphaIndex == 2) selectedColor = u_colors[2];
else if (alphaIndex == 3) selectedColor = u_colors[3];
else if (alphaIndex == 4) selectedColor = u_colors[4];
else if (alphaIndex == 5) selectedColor = u_colors[5];
else if (alphaIndex == 6) selectedColor = u_colors[6];
else if (alphaIndex == 7) selectedColor = u_colors[7];
else if (alphaIndex == 8) selectedColor = u_colors[8];
else if (alphaIndex == 9) selectedColor = u_colors[9];
else if (alphaIndex == 10) selectedColor = u_colors[10];
else if (alphaIndex == 11) selectedColor = u_colors[11];
else if (alphaIndex == 12) selectedColor = u_colors[12];
else if (alphaIndex == 13) selectedColor = u_colors[13];
else if (alphaIndex == 14) selectedColor = u_colors[14];
else selectedColor = u_colors[15];

gl_FragColor = vec4(selectedColor, 1.0);
} else { // red channel only as fallback
gl_FragColor = vec4(color.r, 0.0, 0.0, 1.0);
}
}
`;
export enum GlDebugType {
BlackLevelSample = 0,
GuardLineOk = 1,
GuardLineViolation = 2,
GuardLineCornerOk = 3,
GuardLineCornerViolation = 4,
ImageLineThresholdReached = 5,
ImageLineOk = 6,
ImageLineFail = 7,
EdgeScanProbe = 8,
EdgeScanHit = 9,
SlopeTestDarkOk = 10,
SlopeTestDarkViolation = 11,
}
export class GlDebugCanvas extends GlCanvas {

private debugColors = [
0.1, 0.1, 0.25, // 0 - black level sample
0.3, 1.0, 0.6, // 1 - guard line ok
1.0, 0.1, 0.1, // 2 - guard line violation
0.2, 0.6, 0.4, // 3 - guard line corner ok
0.5, 0.0, 0.0, // 4 - guard line corner violation
1.0, 1.0, 1.0, // 5 - image line threshold reached (stop checking)
0.7, 0.7, 0.7, // 6 - image line ok
0.9, 0.6, 0.6, // 7 - image line fail
0.1, 0.1, 0.4, // 8 - edge scan probe
0.4, 0.4, 1.0, // 9 - edge scan hit
0.2, 0.4, 0.6, // 10 - slope test ok
1.0, 0.0, 0.0, // 11 - slope test fail
0.0, 0.0, 0.0, // 12
0.0, 0.0, 0.0,
0.0, 0.0, 0.0,
0.0, 0.0, 0.0,
];
constructor (options: GlCanvasOptions) {
super(options);
this.canvas.id = options.id;
}

protected loadShaders() {
const vertexShader = this.loadShader(this.gl.VERTEX_SHADER, vsSource);
const fragmentShader = this.loadShader(this.gl.FRAGMENT_SHADER, fSource);

return {vertexShader, fragmentShader};
}
show() {
this.gl.useProgram(this.programInfo.program)
this.gl.uniform3fv((this.programInfo.uniformLocations as any).debugColors, this.debugColors);

this.canvas.style.position = 'fixed';
this.canvas.style.top = '0';
this.canvas.style.right = '0';
this.canvas.style.zIndex = '99999999';
this.canvas.style.transform = 'scale(3)';
this.canvas.style.transformOrigin = 'top right';
this.canvas.style.imageRendering = 'pixelated';

document.body.appendChild(
this.canvas
);
}
drawBuffer(buffer: Uint8Array) {
this.updateTextureBuffer(buffer);
}

protected initWebgl() {
super.initWebgl();

(this.programInfo.uniformLocations as any).debugColors = this.gl.getUniformLocation(this.programInfo.program, 'u_colors');
}

protected updateTextureBuffer(buffer: Uint8Array) {
// this.updateTexture(null);
this.gl.texSubImage2D(
this.gl.TEXTURE_2D,
0,
0,
0,
this.width,
this.height,
this.gl.RGBA,
this.gl.UNSIGNED_BYTE,
buffer
);
this.drawScene();
};
}
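To recap how the debug canvas is driven (this mirrors the calls in Aard.ts earlier in this diff; the option fields below are assumed from the {...sampleCanvas, id} spread and may not match GlCanvasOptions exactly): alpha values 0 through 15 select an entry of debugColors via u_colors, alpha 255 renders the frame as grayscale, and anything else falls back to showing the red channel only.

    const debug = new GlDebugCanvas({ width: 640, height: 360, id: 'uw-debug-gl' }); // dimensions are illustrative
    debug.show();                       // uploads debugColors to u_colors and pins the canvas to the top-right corner
    debug.drawVideoFrame(mainCanvas);   // seed the texture from the main canvas (mainCanvas: HTMLCanvasElement, hypothetical)
    debug.drawBuffer(annotatedRgba);    // re-upload an RGBA buffer whose alpha bytes carry GlDebugType tags (hypothetical variable)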
src/ext/lib/aard/gl/gl-debug-init.ts (new file, 83 lines)
@@ -0,0 +1,83 @@
export interface GlCanvasBuffers {
position: WebGLBuffer,
normal: WebGLBuffer,
textureCoord: WebGLBuffer,
indices: WebGLBuffer,
};

export function initBuffers(gl: WebGLRenderingContext): GlCanvasBuffers {
const positionBuffer = initPositionBuffer(gl);
const textureCoordBuffer = initTextureBuffer(gl);
const indexBuffer = initIndexBuffer(gl);
const normalBuffer = initNormalBuffer(gl);

return {
position: positionBuffer,
normal: normalBuffer,
textureCoord: textureCoordBuffer,
indices: indexBuffer,
};
}
function initPositionBuffer(gl: WebGLRenderingContext) {
const positionBuffer = gl.createBuffer();

gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
const positions = [
-1, -1, 1, -1, -1, 1,
-1, 1, 1, -1, 1, 1
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);

return positionBuffer;
}
function initIndexBuffer(gl: WebGLRenderingContext) {
const indexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indexBuffer);

const indices = [
0, 1, 2,
0, 2, 3,
];

gl.bufferData(
gl.ELEMENT_ARRAY_BUFFER,
new Uint16Array(indices),
gl.STATIC_DRAW
);

return indexBuffer;
}
function initTextureBuffer(gl: WebGLRenderingContext) {
const textureCoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, textureCoordBuffer);

const texCoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer);
const texCoords = [
0, 1, 1, 1, 0, 0,
0, 0, 1, 1, 1, 0
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(texCoords), gl.STATIC_DRAW);

return textureCoordBuffer;
}
function initNormalBuffer(gl: WebGLRenderingContext) {
const normalBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, normalBuffer);

const vertexNormals = [
0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0,
];

gl.bufferData(
gl.ARRAY_BUFFER,
new Float32Array(vertexNormals),
gl.STATIC_DRAW
);

return normalBuffer;
}
@@ -1,5 +1,7 @@
import { GlCanvas } from '../gl/GlCanvas';
import { GlDebugCanvas } from '../gl/GlDebugCanvas';

export interface AardCanvasStore {
main: GlCanvas;
debug?: GlDebugCanvas;
}
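Because debug is optional, callers guard on it before use, as Aard.ts does above. A minimal sketch with a hypothetical helper:

    function pushDebugFrame(store: AardCanvasStore, buffer: Uint8Array) { // hypothetical helper
      if (store.debug) {                // debug canvas only exists when debugging is enabled
        store.debug.drawBuffer(buffer);
      }
    }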