Update tests and demos to call "close".
PiperOrigin-RevId: 527746909
parent 28b9b8d8a3
commit 5d9761cbfd
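Every hunk in this commit makes the same change: each web task test suite gains a Jasmine afterEach hook that calls close() on the task constructed in beforeEach, so graph resources are released between specs. The following is a minimal sketch of that pattern; the FakeTask class and spec names here are illustrative stand-ins for the per-task fakes the real suites use, not names taken from the diff.

// Sketch of the teardown pattern added in the hunks below (assumed
// scaffolding; only the afterEach/close() shape mirrors the commit).
class FakeTask {
  closed = false;
  close(): void {
    this.closed = true;
  }
}

describe('task teardown pattern', () => {
  let task: FakeTask;

  beforeEach(() => {
    task = new FakeTask();
  });

  // New in this commit: close the task after every spec so nothing
  // leaks between tests.
  afterEach(() => {
    task.close();
  });

  it('starts out open', () => {
    expect(task.closed).toBe(false);
  });
});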
@@ -86,6 +86,10 @@ describe('AudioClassifier', () => {
         {baseOptions: {modelAssetBuffer: new Uint8Array([])}});
   });
 
+  afterEach(() => {
+    audioClassifier.close();
+  });
+
   it('initializes graph', async () => {
     verifyGraph(audioClassifier);
     verifyListenersRegistered(audioClassifier);
@@ -76,6 +76,10 @@ describe('AudioEmbedder', () => {
         {baseOptions: {modelAssetBuffer: new Uint8Array([])}});
   });
 
+  afterEach(() => {
+    audioEmbedder.close();
+  });
+
   it('initializes graph', () => {
     verifyGraph(audioEmbedder);
     verifyListenersRegistered(audioEmbedder);
@@ -62,6 +62,10 @@ describe('LanguageDetector', () => {
         {baseOptions: {modelAssetBuffer: new Uint8Array([])}});
   });
 
+  afterEach(() => {
+    languageDetector.close();
+  });
+
   it('initializes graph', async () => {
     verifyGraph(languageDetector);
     verifyListenersRegistered(languageDetector);
@@ -61,6 +61,10 @@ describe('TextClassifier', () => {
         {baseOptions: {modelAssetBuffer: new Uint8Array([])}});
   });
 
+  afterEach(() => {
+    textClassifier.close();
+  });
+
   it('initializes graph', async () => {
     verifyGraph(textClassifier);
     verifyListenersRegistered(textClassifier);
@@ -61,6 +61,10 @@ describe('TextEmbedder', () => {
         {baseOptions: {modelAssetBuffer: new Uint8Array([])}});
   });
 
+  afterEach(() => {
+    textEmbedder.close();
+  });
+
   it('initializes graph', async () => {
     verifyGraph(textEmbedder);
     verifyListenersRegistered(textEmbedder);
@@ -66,6 +66,10 @@ describe('FaceDetector', () => {
         {baseOptions: {modelAssetBuffer: new Uint8Array([])}});
   });
 
+  afterEach(() => {
+    faceDetector.close();
+  });
+
   it('initializes graph', async () => {
     verifyGraph(faceDetector);
     verifyListenersRegistered(faceDetector);
@@ -94,6 +94,10 @@ describe('FaceLandmarker', () => {
         {baseOptions: {modelAssetBuffer: new Uint8Array([])}});
   });
 
+  afterEach(() => {
+    faceLandmarker.close();
+  });
+
   it('initializes graph', async () => {
     verifyGraph(faceLandmarker);
     verifyListenersRegistered(faceLandmarker);
@@ -66,6 +66,10 @@ describe('FaceStylizer', () => {
         {baseOptions: {modelAssetBuffer: new Uint8Array([])}});
   });
 
+  afterEach(() => {
+    faceStylizer.close();
+  });
+
   it('initializes graph', async () => {
     verifyGraph(faceStylizer);
     verifyListenersRegistered(faceStylizer);
@@ -113,6 +113,10 @@ describe('GestureRecognizer', () => {
         {baseOptions: {modelAssetBuffer: new Uint8Array([])}});
   });
 
+  afterEach(() => {
+    gestureRecognizer.close();
+  });
+
   it('initializes graph', async () => {
     verifyGraph(gestureRecognizer);
     verifyListenersRegistered(gestureRecognizer);
@@ -83,6 +83,10 @@ describe('HandLandmarker', () => {
         {baseOptions: {modelAssetBuffer: new Uint8Array([])}});
   });
 
+  afterEach(() => {
+    handLandmarker.close();
+  });
+
   it('initializes graph', async () => {
     verifyGraph(handLandmarker);
     verifyListenersRegistered(handLandmarker);
@@ -66,6 +66,10 @@ describe('ImageClassifier', () => {
         {baseOptions: {modelAssetBuffer: new Uint8Array([])}});
   });
 
+  afterEach(() => {
+    imageClassifier.close();
+  });
+
   it('initializes graph', async () => {
     verifyGraph(imageClassifier);
     verifyListenersRegistered(imageClassifier);
@@ -62,6 +62,10 @@ describe('ImageEmbedder', () => {
         {baseOptions: {modelAssetBuffer: new Uint8Array([])}});
   });
 
+  afterEach(() => {
+    imageEmbedder.close();
+  });
+
   it('initializes graph', async () => {
     verifyGraph(imageEmbedder);
     verifyListenersRegistered(imageEmbedder);
@@ -68,6 +68,10 @@ describe('ImageSegmenter', () => {
         {baseOptions: {modelAssetBuffer: new Uint8Array([])}});
   });
 
+  afterEach(() => {
+    imageSegmenter.close();
+  });
+
   it('initializes graph', async () => {
     verifyGraph(imageSegmenter);
@@ -84,6 +84,10 @@ describe('InteractiveSegmenter', () => {
         {baseOptions: {modelAssetBuffer: new Uint8Array([])}});
   });
 
+  afterEach(() => {
+    interactiveSegmenter.close();
+  });
+
   it('initializes graph', async () => {
     verifyGraph(interactiveSegmenter);
@@ -66,6 +66,10 @@ describe('ObjectDetector', () => {
         {baseOptions: {modelAssetBuffer: new Uint8Array([])}});
   });
 
+  afterEach(() => {
+    objectDetector.close();
+  });
+
   it('initializes graph', async () => {
     verifyGraph(objectDetector);
     verifyListenersRegistered(objectDetector);