Use gl.LINEAR interpolation for confidence masks

PiperOrigin-RevId: 582777383
Sebastian Schmidt 2023-11-15 13:00:08 -08:00 committed by Copybara-Service
parent 47e217896c
commit 12340a8e82
10 changed files with 98 additions and 63 deletions
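
The change threads a new `interpolateValues` flag through `MPMask` so that confidence masks are sampled with `gl.LINEAR` while category masks keep `gl.NEAREST`. A minimal sketch of that pattern, assuming only a WebGL2 context; `createMaskTexture` is an illustrative helper, not MediaPipe's API:

// Illustrative helper (not part of MediaPipe): pick the sampling filter for a
// mask texture from an `interpolateValues` flag, mirroring this commit
// (`gl.LINEAR` for confidence masks, `gl.NEAREST` for category masks).
function createMaskTexture(
    gl: WebGL2RenderingContext, interpolateValues: boolean): WebGLTexture {
  const filter = interpolateValues ? gl.LINEAR : gl.NEAREST;
  const texture = gl.createTexture()!;
  gl.bindTexture(gl.TEXTURE_2D, texture);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, filter);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, filter);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
  gl.bindTexture(gl.TEXTURE_2D, null);
  return texture;
}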

View File

@@ -30,25 +30,20 @@ if (skip) {
 (skip ? xdescribe : describe)('DrawingUtils', () => {
   let shaderContext = new MPImageShaderContext();
-  let canvas2D: HTMLCanvasElement;
-  let context2D: CanvasRenderingContext2D;
+  let canvas2D: OffscreenCanvas;
+  let context2D: OffscreenCanvasRenderingContext2D;
   let drawingUtils2D: DrawingUtils;
-  let canvasWebGL: HTMLCanvasElement;
+  let canvasWebGL: OffscreenCanvas;
   let contextWebGL: WebGL2RenderingContext;
   let drawingUtilsWebGL: DrawingUtils;
 
   beforeEach(() => {
+    canvas2D = canvas2D ?? new OffscreenCanvas(WIDTH, HEIGHT);
+    canvasWebGL = canvasWebGL ?? new OffscreenCanvas(WIDTH, HEIGHT);
     shaderContext = new MPImageShaderContext();
-
-    canvasWebGL = document.createElement('canvas');
-    canvasWebGL.width = WIDTH;
-    canvasWebGL.height = HEIGHT;
     contextWebGL = canvasWebGL.getContext('webgl2')!;
     drawingUtilsWebGL = new DrawingUtils(contextWebGL);
-
-    canvas2D = document.createElement('canvas');
-    canvas2D.width = WIDTH;
-    canvas2D.height = HEIGHT;
     context2D = canvas2D.getContext('2d')!;
     drawingUtils2D = new DrawingUtils(context2D, contextWebGL);
   });
@@ -61,11 +56,11 @@ if (skip) {
   describe(
       'drawConfidenceMask() blends background with foreground color', () => {
-        const foreground = new ImageData(
+        const defaultColor = [255, 255, 255, 255];
+        const overlayImage = new ImageData(
             new Uint8ClampedArray(
                 [0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255]),
             WIDTH, HEIGHT);
-        const background = [255, 255, 255, 255];
         const expectedResult = new Uint8Array([
           255, 255, 255, 255, 178, 178, 178, 255, 102, 102, 102, 255, 0, 0, 0,
           255
@@ -74,48 +69,52 @@ if (skip) {
         it('on 2D canvas', () => {
           const confidenceMask = new MPMask(
               [new Float32Array([0.0, 0.3, 0.6, 1.0])],
+              /* interpolateValues= */ true,
               /* ownsWebGLTexture= */ false, canvas2D, shaderContext, WIDTH,
               HEIGHT);
 
           drawingUtils2D.drawConfidenceMask(
-              confidenceMask, background, foreground);
+              confidenceMask, defaultColor, overlayImage);
 
           const actualResult = context2D.getImageData(0, 0, WIDTH, HEIGHT).data;
           expect(actualResult)
               .toEqual(new Uint8ClampedArray(expectedResult.buffer));
+          confidenceMask.close();
         });
 
         it('on WebGL canvas', () => {
           const confidenceMask = new MPMask(
               [new Float32Array(
                   [0.6, 1.0, 0.0, 0.3])],  // Note: Vertically flipped
+              /* interpolateValues= */ true,
               /* ownsWebGLTexture= */ false, canvasWebGL, shaderContext, WIDTH,
               HEIGHT);
 
           drawingUtilsWebGL.drawConfidenceMask(
-              confidenceMask, background, foreground);
+              confidenceMask, defaultColor, overlayImage);
 
           const actualResult = new Uint8Array(WIDTH * HEIGHT * 4);
           contextWebGL.readPixels(
               0, 0, WIDTH, HEIGHT, contextWebGL.RGBA,
               contextWebGL.UNSIGNED_BYTE, actualResult);
           expect(actualResult).toEqual(expectedResult);
+          confidenceMask.close();
         });
       });
 
   describe(
       'drawConfidenceMask() blends background with foreground image', () => {
-        const foreground = new ImageData(
-            new Uint8ClampedArray(
-                [0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255]),
-            WIDTH, HEIGHT);
-        const background = new ImageData(
+        const defaultImage = new ImageData(
             new Uint8ClampedArray([
               255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
               255, 255, 255
             ]),
             WIDTH, HEIGHT);
+        const overlayImage = new ImageData(
+            new Uint8ClampedArray(
+                [0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255]),
+            WIDTH, HEIGHT);
         const expectedResult = new Uint8Array([
           255, 255, 255, 255, 178, 178, 178, 255, 102, 102, 102, 255, 0, 0, 0,
           255
@@ -124,32 +123,36 @@ if (skip) {
         it('on 2D canvas', () => {
           const confidenceMask = new MPMask(
               [new Float32Array([0.0, 0.3, 0.6, 1.0])],
+              /* interpolateValues= */ true,
               /* ownsWebGLTexture= */ false, canvas2D, shaderContext, WIDTH,
               HEIGHT);
 
           drawingUtils2D.drawConfidenceMask(
-              confidenceMask, background, foreground);
+              confidenceMask, defaultImage, overlayImage);
 
           const actualResult = context2D.getImageData(0, 0, WIDTH, HEIGHT).data;
           expect(actualResult)
               .toEqual(new Uint8ClampedArray(expectedResult.buffer));
+          confidenceMask.close();
         });
 
         it('on WebGL canvas', () => {
           const confidenceMask = new MPMask(
               [new Float32Array(
                   [0.6, 1.0, 0.0, 0.3])],  // Note: Vertically flipped
+              /* interpolateValues= */ true,
               /* ownsWebGLTexture= */ false, canvasWebGL, shaderContext, WIDTH,
               HEIGHT);
 
           drawingUtilsWebGL.drawConfidenceMask(
-              confidenceMask, background, foreground);
+              confidenceMask, defaultImage, overlayImage);
 
           const actualResult = new Uint8Array(WIDTH * HEIGHT * 4);
           contextWebGL.readPixels(
               0, 0, WIDTH, HEIGHT, contextWebGL.RGBA,
               contextWebGL.UNSIGNED_BYTE, actualResult);
           expect(actualResult).toEqual(expectedResult);
+          confidenceMask.close();
         });
       });
@@ -167,6 +170,7 @@ if (skip) {
     it('on 2D canvas', () => {
       const categoryMask = new MPMask(
           [new Uint8Array([0, 1, 2, 3])],
+          /* interpolateValues= */ false,
           /* ownsWebGLTexture= */ false, canvas2D, shaderContext, WIDTH,
           HEIGHT);
@@ -175,11 +179,13 @@ if (skip) {
       const actualResult = context2D.getImageData(0, 0, WIDTH, HEIGHT).data;
       expect(actualResult)
           .toEqual(new Uint8ClampedArray(expectedResult.buffer));
+      categoryMask.close();
     });
 
     it('on WebGL canvas', () => {
       const categoryMask = new MPMask(
           [new Uint8Array([2, 3, 0, 1])],  // Note: Vertically flipped
+          /* interpolateValues= */ false,
           /* ownsWebGLTexture= */ false, canvasWebGL, shaderContext, WIDTH,
           HEIGHT);
@@ -190,6 +196,7 @@ if (skip) {
           0, 0, WIDTH, HEIGHT, contextWebGL.RGBA, contextWebGL.UNSIGNED_BYTE,
           actualResult);
       expect(actualResult).toEqual(expectedResult);
+      categoryMask.close();
     });
   });

View File

@@ -419,6 +419,7 @@ export class DrawingUtils {
     const convertedMask = new MPMask(
         [data],
+        mask.interpolateValues,
         /* ownsWebGlTexture= */ false,
         gl.canvas,
         this.convertToWebGLTextureShaderContext,

View File

@@ -92,11 +92,15 @@ export class CategoryMaskShaderContext extends MPImageShaderContext {
       colorMap: Map<number, number[]>|number[][]) {
     const gl = this.gl!;
+    // Bind category mask
+    gl.activeTexture(gl.TEXTURE0);
+    gl.bindTexture(gl.TEXTURE_2D, categoryMask);
+
     // TODO: We should avoid uploading textures from CPU to GPU
     // if the textures haven't changed. This can lead to drastic performance
     // slowdowns (~50ms per frame). Users can reduce the penalty by passing a
     // canvas object instead of ImageData/HTMLImageElement.
-    gl.activeTexture(gl.TEXTURE0);
+    gl.activeTexture(gl.TEXTURE1);
     gl.bindTexture(gl.TEXTURE_2D, this.backgroundTexture!);
     gl.texImage2D(
         gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, background);
@@ -117,19 +121,15 @@ export class CategoryMaskShaderContext extends MPImageShaderContext {
         pixels[index * 4 + 2] = rgba[2];
         pixels[index * 4 + 3] = rgba[3];
       });
-      gl.activeTexture(gl.TEXTURE1);
+      gl.activeTexture(gl.TEXTURE2);
       gl.bindTexture(gl.TEXTURE_2D, this.colorMappingTexture!);
       gl.texImage2D(
           gl.TEXTURE_2D, 0, gl.RGBA, 256, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE,
           new Uint8Array(pixels));
     } else {
-      gl.activeTexture(gl.TEXTURE1);
+      gl.activeTexture(gl.TEXTURE2);
       gl.bindTexture(gl.TEXTURE_2D, this.colorMappingTexture!);
     }
-
-    // Bind category mask
-    gl.activeTexture(gl.TEXTURE2);
-    gl.bindTexture(gl.TEXTURE_2D, categoryMask);
   }
 
   unbindTextures() {
@@ -148,10 +148,11 @@ export class CategoryMaskShaderContext extends MPImageShaderContext {
   protected override setupTextures(): void {
     const gl = this.gl!;
-    gl.activeTexture(gl.TEXTURE0);
+    gl.activeTexture(gl.TEXTURE1);
     this.backgroundTexture = this.createTexture(gl, gl.LINEAR);
     // Use `gl.NEAREST` to prevent interpolating values in our category to
     // color map.
+    gl.activeTexture(gl.TEXTURE2);
     this.colorMappingTexture = this.createTexture(gl, gl.NEAREST);
   }
@@ -172,9 +173,9 @@ export class CategoryMaskShaderContext extends MPImageShaderContext {
   protected override configureUniforms(): void {
     super.configureUniforms();
     const gl = this.gl!;
-    gl.uniform1i(this.backgroundTextureUniform!, 0);
-    gl.uniform1i(this.colorMappingTextureUniform!, 1);
-    gl.uniform1i(this.maskTextureUniform!, 2);
+    gl.uniform1i(this.maskTextureUniform!, 0);
+    gl.uniform1i(this.backgroundTextureUniform!, 1);
+    gl.uniform1i(this.colorMappingTextureUniform!, 2);
   }
 
   override close(): void {

View File

@@ -51,9 +51,9 @@ export class ConfidenceMaskShaderContext extends MPImageShaderContext {
   protected override setupTextures(): void {
     const gl = this.gl!;
-    gl.activeTexture(gl.TEXTURE0);
-    this.defaultTexture = this.createTexture(gl);
     gl.activeTexture(gl.TEXTURE1);
+    this.defaultTexture = this.createTexture(gl);
+    gl.activeTexture(gl.TEXTURE2);
     this.overlayTexture = this.createTexture(gl);
   }
@@ -74,9 +74,9 @@ export class ConfidenceMaskShaderContext extends MPImageShaderContext {
   protected override configureUniforms(): void {
     super.configureUniforms();
     const gl = this.gl!;
-    gl.uniform1i(this.defaultTextureUniform!, 0);
-    gl.uniform1i(this.overlayTextureUniform!, 1);
-    gl.uniform1i(this.maskTextureUniform!, 2);
+    gl.uniform1i(this.maskTextureUniform!, 0);
+    gl.uniform1i(this.defaultTextureUniform!, 1);
+    gl.uniform1i(this.overlayTextureUniform!, 2);
   }
 
   bindAndUploadTextures(
@@ -88,17 +88,17 @@ export class ConfidenceMaskShaderContext extends MPImageShaderContext {
     // canvas object instead of ImageData/HTMLImageElement.
     const gl = this.gl!;
     gl.activeTexture(gl.TEXTURE0);
+    gl.bindTexture(gl.TEXTURE_2D, confidenceMask);
+
+    gl.activeTexture(gl.TEXTURE1);
     gl.bindTexture(gl.TEXTURE_2D, this.defaultTexture!);
     gl.texImage2D(
         gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, defaultImage);
-    gl.activeTexture(gl.TEXTURE1);
+    gl.activeTexture(gl.TEXTURE2);
     gl.bindTexture(gl.TEXTURE_2D, this.overlayTexture!);
     gl.texImage2D(
         gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, overlayImage);
-
-    gl.activeTexture(gl.TEXTURE2);
-    gl.bindTexture(gl.TEXTURE_2D, confidenceMask);
   }
 
   unbindTextures() {
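
Net effect of the reordering above: the confidence mask now lives on texture unit 0, where it keeps the gl.LINEAR filter chosen by MPMask, while the default and overlay images move to units 1 and 2. A sketch of that binding order, assuming a WebGL2 context and three already-created textures (the function and parameter names are illustrative, not MediaPipe's API):

// Sketch only: texture-unit layout implied by the diff above.
function bindConfidenceMaskTextures(
    gl: WebGL2RenderingContext, confidenceMask: WebGLTexture,
    defaultTexture: WebGLTexture, overlayTexture: WebGLTexture): void {
  gl.activeTexture(gl.TEXTURE0);  // unit 0: the confidence mask itself
  gl.bindTexture(gl.TEXTURE_2D, confidenceMask);
  gl.activeTexture(gl.TEXTURE1);  // unit 1: default (background) image
  gl.bindTexture(gl.TEXTURE_2D, defaultTexture);
  gl.activeTexture(gl.TEXTURE2);  // unit 2: overlay (foreground) image
  gl.bindTexture(gl.TEXTURE_2D, overlayTexture);
}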

View File

@@ -136,7 +136,7 @@ class MPMaskTestContext {
       shaderContext: MPImageShaderContext, input: MaskType, width: number,
       height: number): MPMask {
     return new MPMask(
-        [input],
+        [input], /* interpolateValues= */ false,
         /* ownsWebGLTexture= */ false, context.canvas, shaderContext, width,
         height);
   }
@@ -182,7 +182,7 @@ class MPMaskTestContext {
     const shaderContext = new MPImageShaderContext();
     const mask = new MPMask(
-        [context.webGLTexture],
+        [context.webGLTexture], /* interpolateValues= */ false,
         /* ownsWebGLTexture= */ false, context.canvas, shaderContext, WIDTH,
         HEIGHT);
@@ -196,7 +196,7 @@ class MPMaskTestContext {
     const shaderContext = new MPImageShaderContext();
     const mask = new MPMask(
-        [context.webGLTexture],
+        [context.webGLTexture], /* interpolateValues= */ false,
         /* ownsWebGLTexture= */ false, context.canvas, shaderContext, WIDTH,
         HEIGHT);

View File

@@ -62,9 +62,25 @@ export class MPMask {
   /** The format used to write pixel values from textures. */
   private static texImage2DFormat?: GLenum;
 
-  /** @hideconstructor */
+  /**
+   * @param containers The data source for this mask as a `WebGLTexture`,
+   *     `Uint8Array` or `Float32Array`. Multiple sources of the same data can
+   *     be provided to reduce conversions.
+   * @param interpolateValues If enabled, uses `gl.LINEAR` instead of
+   *     `gl.NEAREST` to interpolate between mask values.
+   * @param ownsWebGLTexture Whether the MPMask should take ownership of the
+   *     `WebGLTexture` and free it when closed.
+   * @param canvas The canvas to use for rendering and conversion. Must be the
+   *     same canvas for any WebGL resources.
+   * @param shaderContext A shader context that is shared between all masks
+   *     from a single task.
+   * @param width The width of the mask.
+   * @param height The height of the mask.
+   * @hideconstructor
+   */
   constructor(
       private readonly containers: MPMaskContainer[],
+      readonly interpolateValues: boolean,
       private ownsWebGLTexture: boolean,
       /** Returns the canvas element that the mask is bound to. */
       readonly canvas: HTMLCanvasElement|OffscreenCanvas|undefined,
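
For reference, a usage sketch of the widened constructor, mirroring the test setup earlier in this commit; it assumes MPMask and MPImageShaderContext are importable in the calling code and that a 2x2 confidence mask is wanted:

// Sketch (assumed imports): a 2x2 confidence mask sampled with gl.LINEAR.
const shaderContext = new MPImageShaderContext();
const canvas = new OffscreenCanvas(2, 2);
const confidenceMask = new MPMask(
    [new Float32Array([0.0, 0.3, 0.6, 1.0])],
    /* interpolateValues= */ true,
    /* ownsWebGLTexture= */ false, canvas, shaderContext,
    /* width= */ 2, /* height= */ 2);
// ... use the mask, then release its resources ...
confidenceMask.close();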
@@ -215,7 +231,8 @@ export class MPMask {
       // Create a new texture and use it to back a framebuffer
       gl.activeTexture(gl.TEXTURE1);
-      destinationContainer = shaderContext.createTexture(gl, gl.NEAREST);
+      destinationContainer = shaderContext.createTexture(
+          gl, this.interpolateValues ? gl.LINEAR : gl.NEAREST);
       gl.bindTexture(gl.TEXTURE_2D, destinationContainer);
 
       const format = this.getTexImage2DFormat();
       gl.texImage2D(
@@ -242,8 +259,8 @@ export class MPMask {
     }
 
     return new MPMask(
-        destinationContainers, this.hasWebGLTexture(), this.canvas,
-        this.shaderContext, this.width, this.height);
+        destinationContainers, this.interpolateValues, this.hasWebGLTexture(),
+        this.canvas, this.shaderContext, this.width, this.height);
   }
 
   private getGL(): WebGL2RenderingContext {
@@ -254,7 +271,7 @@ export class MPMask {
     }
     if (!this.gl) {
       this.gl = assertNotNull(
-          this.canvas.getContext('webgl2') as WebGL2RenderingContext | null,
+          this.canvas.getContext('webgl2'),
           'You cannot use a canvas that is already bound to a different ' +
               'type of rendering context.');
     }
@@ -350,11 +367,8 @@ export class MPMask {
     let webGLTexture = this.getContainer(MPMaskType.WEBGL_TEXTURE);
     if (!webGLTexture) {
       const shaderContext = this.getShaderContext();
-      // `gl.NEAREST` ensures that we do not get interpolated values for
-      // masks. In some cases, the user might want interpolation (e.g. for
-      // confidence masks), so we might want to make this user-configurable.
-      // Note that `MPImage` uses `gl.LINEAR`.
-      webGLTexture = shaderContext.createTexture(gl, gl.NEAREST);
+      webGLTexture = shaderContext.createTexture(
+          gl, this.interpolateValues ? gl.LINEAR : gl.NEAREST);
       this.containers.push(webGLTexture);
       this.ownsWebGLTexture = true;
     }

View File

@@ -274,8 +274,9 @@ export abstract class VisionTaskRunner extends TaskRunner {
   }
 
   /** Converts a WasmImage to an MPMask. */
-  protected convertToMPMask(wasmImage: WasmImage, shouldCopyData: boolean):
-      MPMask {
+  protected convertToMPMask(
+      wasmImage: WasmImage, interpolateValues: boolean,
+      shouldCopyData: boolean): MPMask {
     const {data, width, height} = wasmImage;
     const pixels = width * height;
@@ -291,7 +292,7 @@ export abstract class VisionTaskRunner extends TaskRunner {
     }
 
     const mask = new MPMask(
-        [container],
+        [container], interpolateValues,
         /* ownsWebGLTexture= */ false, this.graphRunner.wasmModule.canvas!,
         this.shaderContext, width, height);
     return shouldCopyData ? mask.clone() : mask;

View File

@@ -424,7 +424,10 @@ export class ImageSegmenter extends VisionTaskRunner {
         CONFIDENCE_MASKS_STREAM, (masks, timestamp) => {
           this.confidenceMasks = masks.map(
               wasmImage => this.convertToMPMask(
-                  wasmImage, /* shouldCopyData= */ !this.userCallback));
+                  wasmImage,
+                  /* interpolateValues= */ true,
+                  /* shouldCopyData= */ !this.userCallback,
+                  ));
           this.setLatestOutputTimestamp(timestamp);
         });
     this.graphRunner.attachEmptyPacketListener(
@@ -442,7 +445,10 @@ export class ImageSegmenter extends VisionTaskRunner {
     this.graphRunner.attachImageListener(
         CATEGORY_MASK_STREAM, (mask, timestamp) => {
           this.categoryMask = this.convertToMPMask(
-              mask, /* shouldCopyData= */ !this.userCallback);
+              mask,
+              /* interpolateValues= */ false,
+              /* shouldCopyData= */ !this.userCallback,
+          );
           this.setLatestOutputTimestamp(timestamp);
         });
     this.graphRunner.attachEmptyPacketListener(

View File

@@ -341,7 +341,10 @@ export class InteractiveSegmenter extends VisionTaskRunner {
         CONFIDENCE_MASKS_STREAM, (masks, timestamp) => {
           this.confidenceMasks = masks.map(
               wasmImage => this.convertToMPMask(
-                  wasmImage, /* shouldCopyData= */ !this.userCallback));
+                  wasmImage,
+                  /* interpolateValues= */ true,
+                  /* shouldCopyData= */ !this.userCallback,
+                  ));
           this.setLatestOutputTimestamp(timestamp);
         });
     this.graphRunner.attachEmptyPacketListener(
@@ -359,7 +362,8 @@ export class InteractiveSegmenter extends VisionTaskRunner {
     this.graphRunner.attachImageListener(
         CATEGORY_MASK_STREAM, (mask, timestamp) => {
           this.categoryMask = this.convertToMPMask(
-              mask, /* shouldCopyData= */ !this.userCallback);
+              mask, /* interpolateValues= */ false,
+              /* shouldCopyData= */ !this.userCallback);
           this.setLatestOutputTimestamp(timestamp);
         });
     this.graphRunner.attachEmptyPacketListener(

View File

@@ -470,7 +470,8 @@ export class PoseLandmarker extends VisionTaskRunner {
         SEGMENTATION_MASK_STREAM, (masks, timestamp) => {
           this.segmentationMasks = masks.map(
               wasmImage => this.convertToMPMask(
-                  wasmImage, /* shouldCopyData= */ !this.userCallback));
+                  wasmImage, /* interpolateValues= */ true,
+                  /* shouldCopyData= */ !this.userCallback));
           this.setLatestOutputTimestamp(timestamp);
         });
     this.graphRunner.attachEmptyPacketListener(