Compare commits

..

No commits in common. "c15d7ab28e1377498a9c960bd6380b2c38a6b47b" and "e56d32180cd336f4b615ab2c87bdc571cd6bc1fc" have entirely different histories.

15 changed files with 226 additions and 975 deletions

View File

@ -1,666 +0,0 @@
import AspectRatioType from '../../../common/enums/AspectRatioType.enum';
import EventBus from '../EventBus';
import Logger from '../Logger';
import Settings from '../Settings';
import VideoData from '../video-data/VideoData';
import { Corner } from './enums/corner.enum';
import { GlCanvas } from './gl/GlCanvas';
import { AardCanvasStore } from './interfaces/aard-canvas-store.interface';
import { AardStatus, initAardStatus } from './interfaces/aard-status.interface';
import { AardTestResults, initAardTestResults } from './interfaces/aard-test-results.interface';
import { AardTimers, initAardTimers } from './interfaces/aard-timers.interface';
// Automatic Aspect Ratio Detector
// Here's how it works:
/**
* [ ] Draw frame to canvas
* |
* |
* | 0. A few things about the frame.
* | We imagine that the frame is divided in three regions.
* | Edge regions span from left and right edges towards the
* | center of the frame. The border between edge region and
* | center region is user-defined. We trust center region
* | more than we trust the edge regions, because edge regions
* | may contain a watermark that we don't give a single fuck
* | about. To make things simple, we'll assume that each video
* | gets at most one (1) watermark, which appears in at most
* | one (1) of the four edge panels.
* |
* | EDGE REGION % CENTER REGION % EDGE REGION
* |
* | +----------------+---------------------+----------------+
* | | : : |
* | | : : |
* | | : v i d e o : |
* | | . . . . . . . . . . . . . . . . . . . . . . . . . . . |
* | | : f r a m e : |
* | | : : |
* | | : : |
* | +----------------+---------------------+----------------+
* |
* |
* |
* | 1. Do some fast tests in order to determine whether
* | we need to run the more expensive tests.
* | (admittedly, letterbox shrink and grow tests might
* | be more expensive than full edge detection, but
* | _probably_ not due to data locality)
* |
* |
* V 🔶 Check pixels in the corner. Are they black?
* < > no +
* | V
* yes no black bars, nothing to be done > 🛑 END
* |
* V 🔶 Did we detect black bars yet?
* < > yes +
* | | Check pixels at the inner edge of the
* | 🔶 black bar. Are there any non-black pixels?
* no < > no >+
* | | |
* | yes |
* | | |
* | | Are those non-black pixels near the |
* V V 🔶 one of the edges & within tolerance? V
* | < > yes, only at one edge >+
* | | |
* | no, there's too many non-black pixels / |
* | non-black pixels are in the center |
* | | |
* +< Aspect ratio definitely changed |
* | |
* | Check pixels at the outer edges of the |
* | area of the frame we think should con- |
* V tain the image. Are at least ??% of |
* | those pixels non-black? 🔶V
* | no < >
* | | |
* +<< Aspect ratio probably changed yes
* | |
* | V
* | Aspect ratio is same as before
* | |
* | 2. If we came this far, we need to run aspect |
* | ratio detection. |
* | |
* | |
* | |
* V |
* SCAN FRAME, COLUMN BY COLUMN |
* FOR EACH COLUMN: V |
* : | : |
* : Check every pixel from the edge towards : |
* : the center of the frame. Continue until : |
* : reaching the first non-black pixel. : |
* : | : |
* : | Did we reach a non-black pixel : |
* : V 🔶before reaching center of frame? : V
* : no < > yes : |
* : V | : |
* : Mark column as V : |
* : invalid Check pixels to the left and right : |
* : of detected pixel within certain : |
* : distance : |
* : | : |
* : V 🔶 Are all pixels non-black? : V
* : yes < > no : |
* : | V : |
* : | Mark column as invalid. : |
* : V : |
* : Check pixels to the left and right of detected : |
* : pixel within certain distance, but in the last : |
* : row without detection : |
* : | : |
* : V 🔶 Are all pixels black? ...... V
* : yes < > no : |
* : V | : |
* : | V : |
* : | Mark column as invalid : |
* : | : |
* : | Is pixel significantly brighter : |
* : V 🔶 than the black level? : V
* : yes < > no : |
* : | | : |
* : | Run gradient detection. : |
* : | | : |
* : | V 🔶 Is gradient detected? : V
* : | no < > yes : |
* : V V | : |
* : Record the detected V : |
* : position Mark column as invalid : |
* - - - - | - - - - - - - - - - - - - |
* V |
* Process image positions we detected, separately |
* for upper and lower bars. |
* Topmost detection is considered the only valid |
* option (unless in edge section under certain |
* circumstances). If that position appears more |
* than once, that increases our confidence. |
* | |
* V 🔶 Are we confident enough? V
* yes < > no |
* V | |
* Aspect ratio is certain Aspect ratio not certain |
* | | |
* | | |
* | | |
* | | |
* : : |
* |
* 2. Sometimes, there might be subtitles hardcoded in |
* the black bars. If user enables this feature, then V
* presence of subtitle should invalidate |
* TODO: IMPLEMENT THIS |
* |
* : : |
* | | |
* | | |
* V 🔶 Is subtitle detection enabled? 🔶 V \/
* < > no no < > Detect |
* | V V | Sub? 🔶 V
* | Apply aspect Do nothing <]|[ no < >
* | ratio | | |
* yes +> 🛑 END <+ yes yes
* | | |
* | | |
* V V V
* RUN SUBTITLE DETECTION RUN SUBTITLE DETECTION
* V V V
* - - - - - - - - - - - - - - - - - - - - -
* : [ ] :
* : | :
* : V :
* : Draw only the letterbox region of the video to :
* : a separate canvas and check a few rows. :
* : | :
* : | Are there any non-black pixels :
* : V 🔶 in sampled rows of letterbox? :
* : < > no + :
* : | | Were we certain about aspect :
* : yes V 🔶 ratio before checking for subs? :
* : | < > no > Do nothing :
* : | | V :
* : | yes > Apply or keep -+-> 🛑 END :
* : | aspect ratio :
* : | :
* : ++ Were we confident enough about :
* : V 🔶 detected aspect ratio :
* : no < > yes :
* : | | :
* : | Stop automatic aspect ratio detection :
* : V V :
* : Reset aspect ratio to original :
* : | :
* : V :
* : 🛑 END :
* - - - - - - - - - - - - - - - - - - - - -
*
*/
class Aard {
  //#region configuration parameters
  private logger: Logger;
  private conf: VideoData;
  private settings: Settings;
  private eventBus: EventBus;

  /** Random-ish instance ID, used to tell detector instances apart in log output. */
  private arid: string;

  /** Event bus subscriptions as {action: command[]}. Currently empty. */
  private eventBusCommands = {
    // 'get-aard-timing': [{
    //   function: () => this.handlePerformanceDataRequest()
    // }]
  };
  //#endregion

  private video: HTMLVideoElement;
  private animationFrame: number;

  //#region internal state
  public status: AardStatus = initAardStatus();
  private timers: AardTimers = initAardTimers();
  private canvasStore: AardCanvasStore;
  private testResults: AardTestResults = initAardTestResults();
  //#endregion

  //#region getters
  /**
   * Aspect ratio (width / height) of the video file itself, or undefined
   * when there is no video element or its dimensions aren't known yet.
   */
  get defaultAr() {
    if (!this.video) {
      return undefined;
    }

    const ratio = this.video.videoWidth / this.video.videoHeight;
    if (isNaN(ratio)) {
      return undefined;
    }
    return ratio;
  }
  //#endregion getters

  //#region lifecycle
  constructor(videoData: VideoData) {
    this.logger = videoData.logger;
    this.conf = videoData;
    this.video = videoData.video;
    this.settings = videoData.settings;
    this.eventBus = videoData.eventBus;

    this.initEventBus();

    this.arid = (Math.random() * 100).toFixed();

    // we can tick manually, for debugging
    this.logger.log('info', 'init', `[ArDetector::ctor] creating new ArDetector. arid: ${this.arid}`);
  }

  /** Subscribes every configured command to its event bus action. */
  private initEventBus() {
    for (const action in this.eventBusCommands) {
      for (const command of this.eventBusCommands[action]) {
        this.eventBus.subscribe(action, command);
      }
    }
  }

  /** Creates the sampling canvas and starts the detection loop. */
  private init() {
    this.canvasStore = {
      // FIX: the options object was previously wrapped in an extra
      // `new GlCanvas(...)`, handing a GlCanvas instance to a constructor
      // that expects a GlCanvasOptions object.
      main: new GlCanvas(this.settings.active.arDetect.canvasDimensions.sampleCanvas),
    };
    this.start();
  }
  //#endregion

  /**
   * Starts (or restarts) the autodetection loop on the next animation frame.
   */
  start() {
    if (this.conf.resizer.lastAr.type === AspectRatioType.AutomaticUpdate) {
      // ensure first autodetection will run in any case
      this.conf.resizer.lastAr = {type: AspectRatioType.AutomaticUpdate, ratio: this.defaultAr};
    }

    // cancel any previous loop so we never have two loops ticking at once
    if (this.animationFrame) {
      window.cancelAnimationFrame(this.animationFrame);
    }

    this.status.aardActive = true;
    this.animationFrame = window.requestAnimationFrame((ts: DOMHighResTimeStamp) => this.onAnimationFrame(ts));
  }

  /**
   * Runs one frame check if conditions allow it, then re-schedules itself
   * for the next animation frame.
   */
  private onAnimationFrame(ts: DOMHighResTimeStamp) {
    if (this.canTriggerFrameCheck()) {
      this.main();
    }
    this.animationFrame = window.requestAnimationFrame((ts: DOMHighResTimeStamp) => this.onAnimationFrame(ts));
  }

  /**
   * Main loop for scanning aspect ratio changes.
   */
  private async main() {
    try {
      // We abuse a do-while loop to eat our cake (get early returns)
      // and have it, too (if we return early, we still execute code
      // at the end of this function)
      do {
        const imageData = await new Promise<Uint8Array>(
          resolve => {
            this.canvasStore.main.drawVideoFrame(this.video);
            resolve(this.canvasStore.main.getImageData());
          }
        );

        this.getBlackLevelFast(
          imageData, 3, 1,
          this.settings.active.arDetect.canvasDimensions.sampleCanvas.width,
          this.settings.active.arDetect.canvasDimensions.sampleCanvas.height
        );

        if (this.testResults.notLetterbox) {
          // TODO: reset aspect ratio to "AR not applied"
          this.testResults.lastStage = 1;
          break;
        }

        this.checkLetterboxShrink(
          imageData,
          this.settings.active.arDetect.canvasDimensions.sampleCanvas.width,
          this.settings.active.arDetect.canvasDimensions.sampleCanvas.height
        );
      } while (false);

      // TODO: emit debug values if debugging is enabled
      this.testResults.isFinished = true;
    } catch (e) {
      console.warn('[Ultrawidify] Aspect ratio autodetection crashed for some reason.\n\nsome reason:', e);
    }
  }

  /**
   * Checks whether conditions for granting a frame check are fulfilled.
   * @returns true when a new frame check may run
   */
  private canTriggerFrameCheck() {
    // if video was paused & we know that we already checked that frame,
    // we will not check it again.
    const videoState = this.getVideoPlaybackState();

    if (videoState !== VideoPlaybackState.Playing) {
      if (this.status.lastVideoStatus === videoState) {
        return false;
      }
    }
    this.status.lastVideoStatus = videoState;

    // rate limit: never check more often than the configured interval
    if (Date.now() < this.timers.nextFrameCheckTime) {
      return false;
    }

    this.timers.nextFrameCheckTime = Date.now() + this.settings.active.arDetect.timers.playing;
    return true;
  }

  /**
   * Determines the video's current playback state. Any error while reading
   * the element's properties is treated as VideoPlaybackState.Error.
   */
  private getVideoPlaybackState(): VideoPlaybackState {
    try {
      if (this.video.ended) {
        return VideoPlaybackState.Ended;
      } else if (this.video.paused) {
        return VideoPlaybackState.Paused;
      } else if (this.video.error) {
        return VideoPlaybackState.Error;
      } else {
        return VideoPlaybackState.Playing;
      }
    } catch (e) {
      this.logger.log('warn', 'debug', `[ArDetect::getVideoPlaybackState] There was an error while determining video playback state.`, e);
      return VideoPlaybackState.Error;
    }
  }

  //#region buffer tests
  /**
   * Get black level of a given frame. We sample black level on very few
   * positions, just the corners of the frame. If letterboxing or pillarboxing
   * exists, then pixels in the corners of the frame should be the blackest
   * it gets.
   *
   * Sampling pattern are four lines, each shooting from its respective corner.
   * Value of 'samples' parameter determines how many pixels along this line we
   * are going to sample. 'offset' means how many pixels of those four lines we
   * are going to skip before we start sampling.
   *
   *  x  0 1 ...                 ...  x-1
   * y   × ------------... ...------------ ×
   * 0   | 1                             1 |
   * 1   |   2                         2   |
   * :   |     .                     .     :
   * :   .       .                 .
   *
   * :   :       .                 .       :
   * |   |     .                     .     |
   * |   |   2                         2   |
   * h-1 | 1                             1 |
   *     × ------------... ...------------ ×
   *
   * IMPORTANT NOTES
   *  <> imageData is a one-dimensional RGBA array (4 bytes per pixel).
   *  <> blackLevel is the darkest "brightest subpixel" detected.
   *  <> If image has no crop, this function WILL NOT get the true black level.
   *     In that case we don't get an accurate black level, but we know straight
   *     away that the image is uncropped, so other, more expensive tests can
   *     be skipped.
   *
   * @param imageData array of pixels (4 bytes/fields per pixel)
   * @param samples number of samples per corner
   * @param offset number of pixels to skip from each corner before sampling
   * @param width width of the frame
   * @param height height of the frame
   */
  private getBlackLevelFast(imageData: Uint8Array, samples: number, offset: number, width: number, height: number) {
    // there's 4 points for each sample, and 3 components for each of the sampling points.
    const pixelValues = new Array<number>(samples * 12);
    let pvi = 0;

    // We access pixels in ascending memory order in order to take advantage
    // of data locality.
    const end = offset + samples;

    // top two corners — walk the diagonals away from each corner
    for (let i = offset; i < end; i++) {
      const px_r = (i * width * 4) + (i * 4); // red component starts here
      pixelValues[pvi++] = imageData[px_r];
      pixelValues[pvi++] = imageData[px_r + 1];
      pixelValues[pvi++] = imageData[px_r + 2];

      const endpx_r = px_r + (width * 4) - (i * 8) - 4; // -4 because 4 bytes per pixel, and - twice the offset to mirror the diagonal
      pixelValues[pvi++] = imageData[endpx_r];
      pixelValues[pvi++] = imageData[endpx_r + 1];
      pixelValues[pvi++] = imageData[endpx_r + 2];
    }

    // now let's populate the bottom two corners (iterating so rows ascend)
    for (let i = end; i --> offset;) {
      const row = height - i - 1; // since first row is 0, last row is height - 1
      const px_r = (row * width * 4) + (i * 4);
      pixelValues[pvi++] = imageData[px_r];
      pixelValues[pvi++] = imageData[px_r + 1];
      pixelValues[pvi++] = imageData[px_r + 2];

      const endpx_r = px_r + (width * 4) - (i * 8) - 4; // -4 because 4 bytes per pixel, and - twice the offset to mirror the diagonal
      pixelValues[pvi++] = imageData[endpx_r];
      pixelValues[pvi++] = imageData[endpx_r + 1];
      pixelValues[pvi++] = imageData[endpx_r + 2];
    }

    let min = 255;
    let avg = 0;
    let p = 0;

    // `p` becomes the brightest of the three color components of each pixel;
    // `min` tracks the darkest such value across all sampled pixels.
    for (let i = 0; i < pixelValues.length; i++) {
      p = pixelValues[i];
      i++;
      if (p < pixelValues[i]) {
        p = pixelValues[i];
      }
      i++;
      if (p < pixelValues[i]) {
        p = pixelValues[i];
      }

      avg += p;
      if (p < min) {
        min = p;
      }
    }

    // FIX: was `avg / samples * 4`, which — due to operator precedence —
    // multiplied by 4 instead of dividing by the number of sampled pixels
    // (`samples` per corner × 4 corners).
    avg = avg / (samples * 4);

    // TODO: unhardcode these values
    this.testResults.notLetterbox = avg > 16;

    // only update black level if not letterbox.
    // NOTE: but maybe we could, if blackLevel can only get lower than
    // the default value.
    if (this.testResults.notLetterbox) {
      if (min < this.testResults.blackLevel) {
        this.testResults.blackLevel = min;
        this.testResults.blackThreshold = min + 16;
      }
    }
  }

  /**
   * Checks if letterbox has shrunk — i.e. whether image content has crept
   * past the previously detected black bar edge (the "guard line").
   *
   * Scans one row at the top guard line and one at the bottom. A non-black
   * pixel in the central region invalidates the guard line outright; in the
   * edge regions, non-black pixels are only counted per corner, so a single
   * watermark/logo corner can be tolerated (see maxInvalidCorners).
   *
   * @param imageData RGBA frame data (4 bytes per pixel)
   * @param width frame width in pixels
   * @param height frame height in pixels
   */
  private checkLetterboxShrink(imageData: Uint8Array, width: number, height: number) {
    // can't check guardline if guardline is not set up (correctly).
    // FIX: bounds previously used `> height`; a guard line equal to `height`
    // would read past the last row, so the check is now inclusive.
    if (
      this.testResults.guardLine.top < 0
      || this.testResults.guardLine.top >= height
      || this.testResults.guardLine.bottom < 0
      || this.testResults.guardLine.bottom >= height
    ) {
      this.testResults.guardLine.invalidated = true;
      return;
    }

    const cornerViolations = [0, 0, 0, 0];
    let subpixelViolation = false;

    const edgePosition = 0.25; // TODO: unhardcode and put into settings. Is % of total width

    // FIX: floor to a whole number of pixels so every derived offset stays
    // aligned to the 4-byte RGBA pixel boundary (width * 0.25 may be
    // fractional, which previously threw all three row segments off).
    const segmentPixels = Math.floor(width * edgePosition);
    const edgeSegmentSize = segmentPixels * 4;

    // check the top row
    {
      const rowStart = this.testResults.guardLine.top * width * 4;
      const firstSegment = rowStart + edgeSegmentSize;
      const rowEnd = rowStart + (width * 4) - 4;
      const secondSegment = rowEnd - edgeSegmentSize;

      let i = rowStart;

      // left edge segment — violations are only counted, not fatal
      while (i < firstSegment) {
        subpixelViolation = false;
        subpixelViolation ||= imageData[i++] > this.testResults.blackThreshold;
        subpixelViolation ||= imageData[i++] > this.testResults.blackThreshold;
        subpixelViolation ||= imageData[i++] > this.testResults.blackThreshold;
        if (subpixelViolation) {
          cornerViolations[Corner.TopLeft]++;
        }
        i++; // skip over alpha channel
      }

      // center segment — any non-black pixel here means the aspect ratio
      // definitely changed.
      // FIX: the original loop `continue`d on alpha bytes without ever
      // incrementing `i`, spinning forever as soon as it reached one.
      while (i < secondSegment) {
        if (i % 4 !== 3 && imageData[i] > this.testResults.blackThreshold) {
          this.testResults.guardLine.invalidated = true;
          return; // no need to check further
        }
        i++;
      }

      // right edge segment
      while (i < rowEnd) {
        subpixelViolation = false;
        subpixelViolation ||= imageData[i++] > this.testResults.blackThreshold;
        subpixelViolation ||= imageData[i++] > this.testResults.blackThreshold;
        subpixelViolation ||= imageData[i++] > this.testResults.blackThreshold;
        if (subpixelViolation) {
          cornerViolations[Corner.TopRight]++;
        }
        i++; // skip over alpha channel
      }
    }

    // check the bottom row. (The original's mid-loop re-alignment hacks are
    // no longer needed: edgeSegmentSize is now always a multiple of 4.)
    {
      const rowStart = this.testResults.guardLine.bottom * width * 4;
      const firstSegment = rowStart + edgeSegmentSize;
      const rowEnd = rowStart + (width * 4) - 4;
      const secondSegment = rowEnd - edgeSegmentSize;

      let i = rowStart;

      while (i < firstSegment) {
        subpixelViolation = false;
        subpixelViolation ||= imageData[i++] > this.testResults.blackThreshold;
        subpixelViolation ||= imageData[i++] > this.testResults.blackThreshold;
        subpixelViolation ||= imageData[i++] > this.testResults.blackThreshold;
        if (subpixelViolation) {
          cornerViolations[Corner.BottomLeft]++;
        }
        i++; // skip over alpha channel
      }

      // FIX: same infinite-loop fix as for the top row.
      while (i < secondSegment) {
        if (i % 4 !== 3 && imageData[i] > this.testResults.blackThreshold) {
          this.testResults.guardLine.invalidated = true;
          return; // no need to check further
        }
        i++;
      }

      while (i < rowEnd) {
        subpixelViolation = false;
        subpixelViolation ||= imageData[i++] > this.testResults.blackThreshold;
        subpixelViolation ||= imageData[i++] > this.testResults.blackThreshold;
        subpixelViolation ||= imageData[i++] > this.testResults.blackThreshold;
        if (subpixelViolation) {
          cornerViolations[Corner.BottomRight]++;
        }
        i++; // skip over alpha channel
      }
    }

    const maxViolations = segmentPixels * 0.20; // TODO: move the 0.2 threshold into settings

    // we won't do a loop for this few elements
    // corners with stuff in them will also be skipped in image test
    this.testResults.guardLine.cornerViolations[0] = cornerViolations[0] > maxViolations;
    this.testResults.guardLine.cornerViolations[1] = cornerViolations[1] > maxViolations;
    this.testResults.guardLine.cornerViolations[2] = cornerViolations[2] > maxViolations;
    this.testResults.guardLine.cornerViolations[3] = cornerViolations[3] > maxViolations;

    // TODO: move this into settings — by default, we allow one corner to
    // extend past the guard line in order to prevent watermarks/logos from
    // preventing cropping the video
    const maxInvalidCorners = 1;

    // this works because +true converts to 1 and +false converts to 0
    const dirtyCount = +this.testResults.guardLine.cornerViolations[0]
      + +this.testResults.guardLine.cornerViolations[1]
      + +this.testResults.guardLine.cornerViolations[2]
      + +this.testResults.guardLine.cornerViolations[3];

    this.testResults.guardLine.invalidated = dirtyCount > maxInvalidCorners;
  }

  /**
   * Checks if letterbox has grown.
   *
   * NOTE(review): only the image-line sanity check is implemented; the
   * actual grow detection is still missing.
   *
   * @param imageData RGBA frame data (4 bytes per pixel)
   * @param width frame width in pixels
   * @param height frame height in pixels
   */
  private checkLetterboxGrow(imageData: Uint8Array, width: number, height: number) {
    // FIX: inclusive upper bound, for the same reason as in checkLetterboxShrink.
    if (
      this.testResults.imageLine.top < 0
      || this.testResults.imageLine.top >= height
      || this.testResults.imageLine.bottom < 0
      || this.testResults.imageLine.bottom >= height
    ) {
      this.testResults.imageLine.invalidated = true;
      return;
    }
  }
}

View File

@ -1,19 +0,0 @@
import { AardPerformanceMeasurement } from './aard-performance-measurements.enum';
/**
 * Performance metrics reported by the automatic aspect ratio detector,
 * broken down per processing stage.
 */
export interface AardPerformanceData {
  /** Aggregate statistics across the whole detection run. */
  total: AardPerformanceMeasurement,
  theoretical: AardPerformanceMeasurement,
  // FIX: the member below was missing its trailing comma — legal in TS
  // (a newline also separates members) but inconsistent with the rest
  // of this declaration.
  imageDraw: AardPerformanceMeasurement,
  blackFrameDraw: AardPerformanceMeasurement,
  blackFrame: AardPerformanceMeasurement,
  fastLetterbox: AardPerformanceMeasurement,
  edgeDetect: AardPerformanceMeasurement,

  imageDrawCount: number,
  blackFrameDrawCount: number,
  blackFrameCount: number,
  fastLetterboxCount: number,
  edgeDetectCount: number,

  aardActive: boolean, // whether autodetection is currently running or not
}

View File

@ -1,6 +0,0 @@
/**
 * Aggregated timing statistics for a single AARD processing stage.
 * Times are presumably in milliseconds (performance.now()) — TODO confirm
 * against the code that populates this structure.
 */
export interface AardPerformanceMeasurement {
  // number of samples the statistics below were computed from
  sampleCount: number,
  // mean execution time of the stage
  averageTime: number,
  // slowest observed execution time of the stage
  worstTime: number,
  // standard deviation of execution times
  stDev: number,
}

View File

@ -1,6 +0,0 @@
/**
 * Corners of a video frame.
 *
 * The numeric values are significant: they are used as indices into
 * per-corner counter/flag arrays (e.g. `cornerViolations` in
 * Aard.checkLetterboxShrink), so they must not be reordered.
 */
export enum Corner {
  TopLeft = 0,
  TopRight = 1,
  BottomLeft = 2,
  BottomRight = 3,
}

View File

@ -1,7 +0,0 @@
/**
 * Playback state of the <video> element, as sampled by the autodetector.
 *
 * FIX: added `export` — this enum is referenced by other modules
 * (Aard.getVideoPlaybackState, AardStatus.lastVideoStatus), but a
 * module-local enum would not be visible to them.
 */
export enum VideoPlaybackState {
  /** State has not been determined yet. */
  NotInitialized,
  Playing,
  Paused,
  Ended,
  /** Video element reported an error, or its state could not be read. */
  Error
}

View File

@ -82,12 +82,6 @@ export class GlCanvas {
private programInfo: GlCanvasProgramInfo; private programInfo: GlCanvasProgramInfo;
private projectionMatrix: mat4; private projectionMatrix: mat4;
get width() {
return this.canvas.width;
}
get height() {
return this.canvas.height;
}
constructor(options: GlCanvasOptions) { constructor(options: GlCanvasOptions) {
this.canvas = document.createElement('canvas'); this.canvas = document.createElement('canvas');
@ -123,18 +117,6 @@ export class GlCanvas {
return this.frameBuffer; return this.frameBuffer;
} }
/**
* Cleans up after itself
*/
destroy() {
this.gl.deleteProgram(this.programInfo.program);
this.gl.deleteBuffer(this.buffers.position);
this.gl.deleteBuffer(this.buffers.normal);
this.gl.deleteBuffer(this.buffers.textureCoord);
this.gl.deleteBuffer(this.buffers.indices);
this.gl.deleteTexture(this.texture);
}
private initWebgl() { private initWebgl() {
// Initialize the GL context // Initialize the GL context
this.gl.clearColor(0.0, 0.0, 0.0, 1.0); this.gl.clearColor(0.0, 0.0, 0.0, 1.0);

View File

@ -1,5 +0,0 @@
import { GlCanvas } from '../gl/GlCanvas';
/**
 * Canvases the automatic aspect ratio detector draws video frames to
 * for sampling.
 */
export interface AardCanvasStore {
  // main sampling canvas: the video frame is drawn to it and its pixel
  // data is read back for analysis (see Aard.main)
  main: GlCanvas;
}

View File

@ -1,14 +0,0 @@
/**
 * Runtime status of the automatic aspect ratio detector.
 *
 * NOTE(review): VideoPlaybackState is referenced but not imported in this
 * file — presumably it resolves via an ambient declaration; verify.
 */
export interface AardStatus {
  /** Whether autodetection is currently running. */
  aardActive: boolean,
  /** Whether a frame check is currently in progress. */
  checkInProgress: boolean,
  /** Playback state the video was in during the last check. */
  lastVideoStatus: VideoPlaybackState,
}

/**
 * Creates the initial detector status: detection idle, no check in
 * progress, video playback state unknown.
 */
export function initAardStatus(): AardStatus {
  const initialStatus: AardStatus = {
    aardActive: false,
    checkInProgress: false,
    lastVideoStatus: VideoPlaybackState.NotInitialized,
  };
  return initialStatus;
}

View File

@ -1,39 +0,0 @@
/**
 * Results of the per-frame AARD tests. Fields marked "cumulative" persist
 * and are refined across frames instead of being reset on every check.
 */
export interface AardTestResults {
  /** Set once a full detection pass has completed. */
  isFinished: boolean,
  /** Index of the last pipeline stage that ran. */
  lastStage: number,
  /** True when the frame is known not to be letterboxed. */
  notLetterbox: boolean,
  blackLevel: number, // is cumulative
  blackThreshold: number, // is cumulative
  /** Inner edge of the detected black bars, plus per-corner violation flags. */
  guardLine: {
    top: number, // is cumulative
    bottom: number, // is cumulative
    invalidated: boolean,
    cornerViolations: [boolean, boolean, boolean, boolean],
  },
  /** Outer edge of the region believed to contain the image. */
  imageLine: {
    top: number, // is cumulative
    bottom: number, // is cumulative
    invalidated: boolean
  }
}

/**
 * Creates a fresh, pristine set of test results: finished, nothing
 * detected, guard/image lines unset (-1) and not invalidated.
 */
export function initAardTestResults(): AardTestResults {
  const guardLine: AardTestResults['guardLine'] = {
    top: -1,
    bottom: -1,
    invalidated: false,
    cornerViolations: [false, false, false, false],
  };
  const imageLine: AardTestResults['imageLine'] = {
    top: -1,
    bottom: -1,
    invalidated: false,
  };

  return {
    isFinished: true,
    lastStage: 0,
    notLetterbox: false,
    blackLevel: 0,
    blackThreshold: 16,
    guardLine,
    imageLine,
  };
}

View File

@ -1,9 +0,0 @@
/**
 * Timestamps that drive AARD scheduling.
 */
export interface AardTimers {
  /** Epoch milliseconds before which no new frame check may start. */
  nextFrameCheckTime: number;
}

/** Creates timers that allow the very first frame check to run immediately. */
export function initAardTimers(): AardTimers {
  const timers: AardTimers = { nextFrameCheckTime: 0 };
  return timers;
}

View File

@ -14,7 +14,6 @@ import Logger from '../Logger';
import VideoData from '../video-data/VideoData'; import VideoData from '../video-data/VideoData';
import Settings from '../Settings'; import Settings from '../Settings';
import EventBus from '../EventBus'; import EventBus from '../EventBus';
import { GlCanvas } from '../aard/gl/GlCanvas';
enum VideoPlaybackState { enum VideoPlaybackState {
Playing, Playing,
@ -83,10 +82,11 @@ class ArDetector {
_nextTick: boolean; _nextTick: boolean;
private animationFrameHandle: any; private animationFrameHandle: any;
private attachedCanvas: HTMLCanvasElement;
canvas: HTMLCanvasElement;
private context: CanvasRenderingContext2D;
canvasImageDataRowLength: number; canvasImageDataRowLength: number;
glCanvas: GlCanvas;
private timers = { private timers = {
nextFrameCheckTime: Date.now() nextFrameCheckTime: Date.now()
} }
@ -123,6 +123,7 @@ class ArDetector {
} }
return ratio; return ratio;
} }
//#endregion getters //#endregion getters
//#region debug getters //#region debug getters
@ -188,19 +189,38 @@ class ArDetector {
// //
// [1] initiate canvases // [1] initiate canvases
// //
if (this.glCanvas) {
this.glCanvas.destroy(); if (!cwidth) {
cwidth = this.settings.active.arDetect.canvasDimensions.sampleCanvas.width;
cheight = this.settings.active.arDetect.canvasDimensions.sampleCanvas.height;
} }
this.glCanvas = new GlCanvas(this.settings.active.arDetect.canvasDimensions.sampleCanvas);
if (this.canvas) {
this.canvas.remove();
}
// if (this.blackframeCanvas) {
// this.blackframeCanvas.remove();
// }
// things to note: we'll be keeping canvas in memory only.
this.canvas = document.createElement("canvas");
this.canvas.width = cwidth;
this.canvas.height = cheight;
// this.blackframeCanvas = document.createElement("canvas");
// this.blackframeCanvas.width = this.settings.active.arDetect.canvasDimensions.blackframeCanvas.width;
// this.blackframeCanvas.height = this.settings.active.arDetect.canvasDimensions.blackframeCanvas.height;
this.context = this.canvas.getContext("2d");
// //
// [2] determine places we'll use to sample our main frame // [2] determine places we'll use to sample our main frame
// //
let ncol = this.settings.active.arDetect.sampling.staticCols; let ncol = this.settings.active.arDetect.sampling.staticCols;
let nrow = this.settings.active.arDetect.sampling.staticRows; let nrow = this.settings.active.arDetect.sampling.staticRows;
let colSpacing = this.glCanvas.width / ncol; let colSpacing = this.canvas.width / ncol;
let rowSpacing = (this.glCanvas.height << 2) / nrow; let rowSpacing = (this.canvas.height << 2) / nrow;
this.sampleLines = []; this.sampleLines = [];
this.sampleCols = []; this.sampleCols = [];
@ -224,6 +244,7 @@ class ArDetector {
// //
// [3] do other things setup needs to do // [3] do other things setup needs to do
// //
this.resetBlackLevel(); this.resetBlackLevel();
// if we're restarting ArDetect, we need to do this in order to force-recalculate aspect ratio // if we're restarting ArDetect, we need to do this in order to force-recalculate aspect ratio
@ -232,10 +253,11 @@ class ArDetector {
this.canvasImageDataRowLength = cwidth << 2; this.canvasImageDataRowLength = cwidth << 2;
this.start(); this.start();
// if(Debug.debugCanvas.enabled){
if(Debug.debugCanvas.enabled){
// this.debugCanvas.init({width: cwidth, height: cheight}); // this.debugCanvas.init({width: cwidth, height: cheight});
// DebugCanvas.draw("test marker","test","rect", {x:5, y:5}, {width: 5, height: 5}); // DebugCanvas.draw("test marker","test","rect", {x:5, y:5}, {width: 5, height: 5});
// } }
this.conf.arSetupComplete = true; this.conf.arSetupComplete = true;
} }
@ -369,6 +391,20 @@ class ArDetector {
); );
} }
private attachCanvas(canvas){
if(this.attachedCanvas)
this.attachedCanvas.remove();
// todo: place canvas on top of the video instead of random location
canvas.style.position = "absolute";
canvas.style.left = "200px";
canvas.style.top = "1200px";
canvas.style.zIndex = 10000;
document.getElementsByTagName("body")[0]
.appendChild(canvas);
}
/** /**
* Adds execution time sample for performance metrics * Adds execution time sample for performance metrics
* @param performanceObject * @param performanceObject
@ -713,19 +749,19 @@ class ArDetector {
// aspect ratio and correct our calculations to account for that // aspect ratio and correct our calculations to account for that
const fileAr = this.video.videoWidth / this.video.videoHeight; const fileAr = this.video.videoWidth / this.video.videoHeight;
const canvasAr = this.glCanvas.width / this.glCanvas.height; const canvasAr = this.canvas.width / this.canvas.height;
let widthCorrected; let widthCorrected;
if (edges.top && edges.bottom) { if (edges.top && edges.bottom) {
// in case of letterbox, we take canvas height as canon and assume width got stretched or squished // in case of letterbox, we take canvas height as canon and assume width got stretched or squished
if (fileAr != canvasAr) { if (fileAr != canvasAr) {
widthCorrected = this.glCanvas.height * fileAr; widthCorrected = this.canvas.height * fileAr;
} else { } else {
widthCorrected = this.glCanvas.width; widthCorrected = this.canvas.width;
} }
return widthCorrected / (this.glCanvas.height - letterbox); return widthCorrected / (this.canvas.height - letterbox);
} }
} }
@ -786,24 +822,28 @@ class ArDetector {
return; return;
} }
if (!this.glCanvas) { if (!this.context) {
this.init(); this.init();
} }
let sampleCols = this.sampleCols.slice(0); let sampleCols = this.sampleCols.slice(0);
let startTime = performance.now(); let startTime = performance.now();
const imageData = await new Promise<Uint8Array>( await new Promise<void>(
resolve => { resolve => {
this.glCanvas.drawVideoFrame(this.video); this.context.drawImage(this.video, 0, 0, this.canvas.width, this.canvas.height);
resolve(this.glCanvas.getImageData()); resolve();
} }
) )
console.log('image data received ...') const imageData = this.context.getImageData(0, 0, this.canvas.width, this.canvas.height).data;
timerResults.imageDrawTime = performance.now() - startTime; timerResults.imageDrawTime = performance.now() - startTime;
startTime = performance.now(); startTime = performance.now();
const bfAnalysis = await this.blackframeTest(imageData); const bfAnalysis = await this.blackframeTest(imageData);
timerResults.blackFrameProcessTime = performance.now() - startTime; timerResults.blackFrameProcessTime = performance.now() - startTime;
if (bfAnalysis.isBlack) { if (bfAnalysis.isBlack) {
@ -1090,7 +1130,7 @@ class ArDetector {
// returns 'false' if we found a non-black edge pixel. // returns 'false' if we found a non-black edge pixel.
// If we detect anything darker than blackLevel, we modify blackLevel to the new lowest value // If we detect anything darker than blackLevel, we modify blackLevel to the new lowest value
const rowOffset = this.glCanvas.width * (this.glCanvas.height - 1); const rowOffset = this.canvas.width * (this.canvas.height - 1);
let currentMin = 255, currentMax = 0, colOffset_r, colOffset_g, colOffset_b, colOffset_rb, colOffset_gb, colOffset_bb, blthreshold = this.settings.active.arDetect.blackbar.threshold; let currentMin = 255, currentMax = 0, colOffset_r, colOffset_g, colOffset_b, colOffset_rb, colOffset_gb, colOffset_bb, blthreshold = this.settings.active.arDetect.blackbar.threshold;
// detect black level. if currentMax comes above blackbar + blackbar threshold, we know we aren't letterboxed // detect black level. if currentMax comes above blackbar + blackbar threshold, we know we aren't letterboxed

View File

@ -42,7 +42,7 @@ class GuardLine {
// to odstrani vse neveljavne nastavitve in vse možnosti, ki niso smiselne // to odstrani vse neveljavne nastavitve in vse možnosti, ki niso smiselne
// this removes any configs with invalid values or values that dont make sense // this removes any configs with invalid values or values that dont make sense
if (bbTop < 0 || bbBottom >= this.aard.glCanvas.height ){ if (bbTop < 0 || bbBottom >= this.aard.canvas.height ){
throw {error: "INVALID_SETTINGS_IN_GUARDLINE", bbTop, bbBottom} throw {error: "INVALID_SETTINGS_IN_GUARDLINE", bbTop, bbBottom}
} }
@ -99,7 +99,7 @@ class GuardLine {
return { success: true }; return { success: true };
} }
let offset = (this.aard.glCanvas.width * this.settings.active.arDetect.guardLine.ignoreEdgeMargin) << 2; let offset = (this.aard.canvas.width * this.settings.active.arDetect.guardLine.ignoreEdgeMargin) << 2;
let offenders = []; let offenders = [];
let offenderCount = -1; // doing it this way means first offender has offenderCount==0. Ez index. let offenderCount = -1; // doing it this way means first offender has offenderCount==0. Ez index.
@ -117,8 +117,8 @@ class GuardLine {
// <<<=======| checking upper row |========>>> // <<<=======| checking upper row |========>>>
rowStart = ((edge_upper * this.aard.glCanvas.width) << 2) + offset; rowStart = ((edge_upper * this.aard.canvas.width) << 2) + offset;
rowEnd = rowStart + ( this.aard.glCanvas.width << 2 ) - (offset * 2); rowEnd = rowStart + ( this.aard.canvas.width << 2 ) - (offset * 2);
if (Debug.debugCanvas.enabled && Debug.debugCanvas.guardLine) { if (Debug.debugCanvas.enabled && Debug.debugCanvas.guardLine) {
// offenderCount = this._gl_debugRowCheck(image, rowStart, rowEnd, offenders, offenderCount); // offenderCount = this._gl_debugRowCheck(image, rowStart, rowEnd, offenders, offenderCount);
@ -127,8 +127,8 @@ class GuardLine {
} }
// <<<=======| checking lower row |========>>> // <<<=======| checking lower row |========>>>
rowStart = ((edge_lower * this.aard.glCanvas.width) << 2) + offset; rowStart = ((edge_lower * this.aard.canvas.width) << 2) + offset;
rowEnd = rowStart + ( this.aard.glCanvas.width << 2 ) - (offset * 2); rowEnd = rowStart + ( this.aard.canvas.width << 2 ) - (offset * 2);
if (Debug.debugCanvas.enabled && Debug.debugCanvas.guardLine) { if (Debug.debugCanvas.enabled && Debug.debugCanvas.guardLine) {
// offenderCount = this._gl_debugRowCheck(image, rowStart, rowEnd, offenders, offenderCount); // offenderCount = this._gl_debugRowCheck(image, rowStart, rowEnd, offenders, offenderCount);
@ -158,7 +158,7 @@ class GuardLine {
if(!this.imageBar.top || !this.imageBar.bottom) if(!this.imageBar.top || !this.imageBar.bottom)
return { success: false }; return { success: false };
let offset = (this.aard.glCanvas.width * this.settings.active.arDetect.guardLine.ignoreEdgeMargin) << 2; let offset = (this.aard.canvas.width * this.settings.active.arDetect.guardLine.ignoreEdgeMargin) << 2;
// TODO: implement logo check. // TODO: implement logo check.
@ -167,14 +167,14 @@ class GuardLine {
// how many non-black pixels we need to consider this check a success. We only need to detect enough pixels // how many non-black pixels we need to consider this check a success. We only need to detect enough pixels
// on one edge (one of the edges can be black as long as both aren't) // on one edge (one of the edges can be black as long as both aren't)
let successThreshold = (this.aard.glCanvas.width * this.settings.active.arDetect.guardLine.imageTestThreshold); let successThreshold = (this.aard.canvas.width * this.settings.active.arDetect.guardLine.imageTestThreshold);
let rowStart, rowEnd; let rowStart, rowEnd;
// <<<=======| checking upper row |========>>> // <<<=======| checking upper row |========>>>
rowStart = ((edge_upper * this.aard.glCanvas.width) << 2) + offset; rowStart = ((edge_upper * this.aard.canvas.width) << 2) + offset;
rowEnd = rowStart + ( this.aard.glCanvas.width << 2 ) - (offset * 2); rowEnd = rowStart + ( this.aard.canvas.width << 2 ) - (offset * 2);
let res = false; let res = false;
@ -190,7 +190,7 @@ class GuardLine {
// <<<=======| checking lower row |========>>> // <<<=======| checking lower row |========>>>
rowStart = ((edge_lower * this.aard.glCanvas.width) << 2) + offset; rowStart = ((edge_lower * this.aard.canvas.width) << 2) + offset;
// rowEnd = rowStart + ( this.conf.canvas.width << 2 ) - (offset * 2); // rowEnd = rowStart + ( this.conf.canvas.width << 2 ) - (offset * 2);

View File

@ -100,7 +100,7 @@ class EdgeDetect{
if(guardLineOut){ if(guardLineOut){
if(guardLineOut.imageFail && !guardLineOut.blackbarFail && this.conf.guardLine.blackbar.top) { if(guardLineOut.imageFail && !guardLineOut.blackbarFail && this.conf.guardLine.blackbar.top) {
upper_top = this.conf.guardLine.blackbar.top; upper_top = this.conf.guardLine.blackbar.top;
upper_bottom = this.conf.glCanvas.height >> 1; upper_bottom = this.conf.canvas.height >> 1;
lower_top = upper_bottom; lower_top = upper_bottom;
lower_bottom = this.conf.guardLine.blackbar.bottom; lower_bottom = this.conf.guardLine.blackbar.bottom;
} else if (! guardLineOut.imageFail && !guardLineOut.blackbarFail && this.conf.guardLine.blackbar.top) { } else if (! guardLineOut.imageFail && !guardLineOut.blackbarFail && this.conf.guardLine.blackbar.top) {
@ -110,20 +110,20 @@ class EdgeDetect{
// a logo could falsely trigger this case, so we need to add some extra margins past // a logo could falsely trigger this case, so we need to add some extra margins past
// the point marked by guardLine.blackbar. Let's say 1/8 of canvas height on either side. // the point marked by guardLine.blackbar. Let's say 1/8 of canvas height on either side.
upper_top = 0; upper_top = 0;
upper_bottom = this.conf.guardLine.blackbar.top + (this.conf.glCanvas.height >> 3); upper_bottom = this.conf.guardLine.blackbar.top + (this.conf.canvas.height >> 3);
lower_top = this.conf.guardLine.blackbar.bottom - (this.conf.glCanvas.height >> 3); lower_top = this.conf.guardLine.blackbar.bottom - (this.conf.canvas.height >> 3);
lower_bottom = this.conf.glCanvas.height - 1; lower_bottom = this.conf.canvas.height - 1;
} else { } else {
upper_top = 0; upper_top = 0;
upper_bottom = (this.conf.glCanvas.height >> 1) /*- parseInt(this.conf.glCanvas.height * this.settings.active.arDetect.edgeDetection.middleIgnoredArea);*/ upper_bottom = (this.conf.canvas.height >> 1) /*- parseInt(this.conf.canvas.height * this.settings.active.arDetect.edgeDetection.middleIgnoredArea);*/
lower_top = (this.conf.glCanvas.height >> 1) /*+ parseInt(this.conf.glCanvas.height * this.settings.active.arDetect.edgeDetection.middleIgnoredArea);*/ lower_top = (this.conf.canvas.height >> 1) /*+ parseInt(this.conf.canvas.height * this.settings.active.arDetect.edgeDetection.middleIgnoredArea);*/
lower_bottom = this.conf.glCanvas.height - 1; lower_bottom = this.conf.canvas.height - 1;
} }
} else{ } else{
upper_top = 0; upper_top = 0;
upper_bottom = (this.conf.glCanvas.height >> 1) /*- parseInt(this.conf.glCanvas.height * this.settings.active.arDetect.edgeDetection.middleIgnoredArea);*/ upper_bottom = (this.conf.canvas.height >> 1) /*- parseInt(this.conf.canvas.height * this.settings.active.arDetect.edgeDetection.middleIgnoredArea);*/
lower_top = (this.conf.glCanvas.height >> 1) /*+ parseInt(this.conf.glCanvas.height * this.settings.active.arDetect.edgeDetection.middleIgnoredArea);*/ lower_top = (this.conf.canvas.height >> 1) /*+ parseInt(this.conf.canvas.height * this.settings.active.arDetect.edgeDetection.middleIgnoredArea);*/
lower_bottom = this.conf.glCanvas.height - 1; lower_bottom = this.conf.canvas.height - 1;
} }
this.logger.log('info', 'arDetect', '[EdgeDetect::findCandidates] searching for candidates on ranges', upper_top, '<->', upper_bottom, ';', lower_top, '<->', lower_bottom); this.logger.log('info', 'arDetect', '[EdgeDetect::findCandidates] searching for candidates on ranges', upper_top, '<->', upper_bottom, ';', lower_top, '<->', lower_bottom);
@ -157,7 +157,7 @@ class EdgeDetect{
// NOTE: this is very simple and will need to be reworked in case we ever // NOTE: this is very simple and will need to be reworked in case we ever
// go for quorum-based edge detection. (Probably not gonna happen) // go for quorum-based edge detection. (Probably not gonna happen)
const topPoint = { const topPoint = {
row: this.conf.glCanvas.height, row: this.conf.canvas.height,
gradient: false, // does current row have a gradient sample gradient: false, // does current row have a gradient sample
noGradient: false, // does current row have 100% confirmed edge sample noGradient: false, // does current row have 100% confirmed edge sample
} }
@ -220,7 +220,7 @@ class EdgeDetect{
} else { } else {
// if top gradient-only sample is closer to the edge than the bottom sample, // if top gradient-only sample is closer to the edge than the bottom sample,
// validation also fails. Otherwise, we can assume success. // validation also fails. Otherwise, we can assume success.
return (topPoint.row >= this.conf.glCanvas.height - bottomPoint.row); return (topPoint.row >= this.conf.canvas.height - bottomPoint.row);
} }
} }
@ -229,7 +229,7 @@ class EdgeDetect{
// whether gradient-only result of bottom row is closer to the edge than than the top // whether gradient-only result of bottom row is closer to the edge than than the top
// sample. // sample.
if (! bottomPoint.noGradient) { if (! bottomPoint.noGradient) {
return (topPoint.row < this.conf.glCanvas.height - bottomPoint.row); return (topPoint.row < this.conf.canvas.height - bottomPoint.row);
} }
return false; return false;
@ -240,8 +240,8 @@ class EdgeDetect{
let edgeCandidatesBottom = {count: 0}; let edgeCandidatesBottom = {count: 0};
let detections; let detections;
let canvasWidth = this.conf.glCanvas.width; let canvasWidth = this.conf.canvas.width;
let canvasHeight = this.conf.glCanvas.height; let canvasHeight = this.conf.canvas.height;
let sampleStart, sampleEnd, loopEnd; let sampleStart, sampleEnd, loopEnd;
let sampleRow_black, sampleRow_color; let sampleRow_black, sampleRow_color;
@ -352,7 +352,7 @@ class EdgeDetect{
edgePostprocess(edges){ edgePostprocess(edges){
let edgesTop = []; let edgesTop = [];
let edgesBottom = []; let edgesBottom = [];
let alignMargin = this.conf.glCanvas.height * this.settings.active.arDetect.allowedMisaligned; let alignMargin = this.conf.canvas.height * this.settings.active.arDetect.allowedMisaligned;
let missingEdge = edges.edgeCandidatesTopCount == 0 || edges.edgeCandidatesBottomCount == 0; let missingEdge = edges.edgeCandidatesTopCount == 0 || edges.edgeCandidatesBottomCount == 0;
@ -377,7 +377,7 @@ class EdgeDetect{
for(let e in edges.edgeCandidatesBottom){ for(let e in edges.edgeCandidatesBottom){
let edge = edges.edgeCandidatesBottom[e]; let edge = edges.edgeCandidatesBottom[e];
edgesBottom.push({ edgesBottom.push({
distance: this.conf.glCanvas.height - edge.offset, distance: this.conf.canvas.height - edge.offset,
absolute: edge.offset, absolute: edge.offset,
count: edge.count count: edge.count
}); });
@ -542,11 +542,11 @@ class EdgeDetect{
blackbarThreshold = this.conf.blackLevel + this.settings.active.arDetect.blackbar.threshold; blackbarThreshold = this.conf.blackLevel + this.settings.active.arDetect.blackbar.threshold;
let middleRowStart = (this.conf.glCanvas.height >> 1) * this.conf.glCanvas.width; let middleRowStart = (this.conf.canvas.height >> 1) * this.conf.canvas.width;
let middleRowEnd = middleRowStart + this.conf.glCanvas.width - 1; let middleRowEnd = middleRowStart + this.conf.canvas.width - 1;
let rowStart = middleRowStart << 2; let rowStart = middleRowStart << 2;
let midpoint = (middleRowStart + (this.conf.glCanvas.width >> 1)) << 2 let midpoint = (middleRowStart + (this.conf.canvas.width >> 1)) << 2
let rowEnd = middleRowEnd << 2; let rowEnd = middleRowEnd << 2;
let edge_left = -1, edge_right = -1; let edge_left = -1, edge_right = -1;
@ -564,7 +564,7 @@ class EdgeDetect{
// check on the right // check on the right
for(let i = rowEnd; i > midpoint; i-= 4){ for(let i = rowEnd; i > midpoint; i-= 4){
if(image[i] > blackbarThreshold || image[i+1] > blackbarThreshold || image[i+2] > blackbarThreshold){ if(image[i] > blackbarThreshold || image[i+1] > blackbarThreshold || image[i+2] > blackbarThreshold){
edge_right = this.conf.glCanvas.width - ((i - rowStart) >> 2); edge_right = this.conf.canvas.width - ((i - rowStart) >> 2);
break; break;
} }
} }