Selaa lähdekoodia

feat: implement palm oil analysis module with ONNX inference provider support

Dr-Swopt 5 päivää sitten
vanhempi
commit
bf95197fb6

+ 133 - 0
package-lock.json

@@ -20,6 +20,7 @@
         "find-process": "^2.1.1",
         "jimp": "^1.6.1",
         "onnxruntime-node": "^1.24.3",
+        "onnxruntime-web": "^1.25.1",
         "pidusage": "^4.0.1",
         "reflect-metadata": "^0.2.2",
         "rxjs": "^7.8.1",
@@ -3151,6 +3152,70 @@
         "url": "https://opencollective.com/pkgr"
       }
     },
+    "node_modules/@protobufjs/aspromise": {
+      "version": "1.1.2",
+      "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz",
+      "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==",
+      "license": "BSD-3-Clause"
+    },
+    "node_modules/@protobufjs/base64": {
+      "version": "1.1.2",
+      "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz",
+      "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==",
+      "license": "BSD-3-Clause"
+    },
+    "node_modules/@protobufjs/codegen": {
+      "version": "2.0.5",
+      "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.5.tgz",
+      "integrity": "sha512-zgXFLzW3Ap33e6d0Wlj4MGIm6Ce8O89n/apUaGNB/jx+hw+ruWEp7EwGUshdLKVRCxZW12fp9r40E1mQrf/34g==",
+      "license": "BSD-3-Clause"
+    },
+    "node_modules/@protobufjs/eventemitter": {
+      "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz",
+      "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==",
+      "license": "BSD-3-Clause"
+    },
+    "node_modules/@protobufjs/fetch": {
+      "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz",
+      "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==",
+      "license": "BSD-3-Clause",
+      "dependencies": {
+        "@protobufjs/aspromise": "^1.1.1",
+        "@protobufjs/inquire": "^1.1.0"
+      }
+    },
+    "node_modules/@protobufjs/float": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz",
+      "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==",
+      "license": "BSD-3-Clause"
+    },
+    "node_modules/@protobufjs/inquire": {
+      "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.1.tgz",
+      "integrity": "sha512-mnzgDV26ueAvk7rsbt9L7bE0SuAoqyuys/sMMrmVcN5x9VsxpcG3rqAUSgDyLp0UZlmNfIbQ4fHfCtreVBk8Ew==",
+      "license": "BSD-3-Clause"
+    },
+    "node_modules/@protobufjs/path": {
+      "version": "1.1.2",
+      "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz",
+      "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==",
+      "license": "BSD-3-Clause"
+    },
+    "node_modules/@protobufjs/pool": {
+      "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz",
+      "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==",
+      "license": "BSD-3-Clause"
+    },
+    "node_modules/@protobufjs/utf8": {
+      "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.1.tgz",
+      "integrity": "sha512-oOAWABowe8EAbMyWKM0tYDKi8Yaox52D+HWZhAIJqQXbqe0xI/GV7FhLWqlEKreMkfDjshR5FKgi3mnle0h6Eg==",
+      "license": "BSD-3-Clause"
+    },
     "node_modules/@sinclair/typebox": {
       "version": "0.27.10",
       "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.10.tgz",
@@ -7622,6 +7687,12 @@
         "node": ">=16"
       }
     },
+    "node_modules/flatbuffers": {
+      "version": "25.9.23",
+      "resolved": "https://registry.npmjs.org/flatbuffers/-/flatbuffers-25.9.23.tgz",
+      "integrity": "sha512-MI1qs7Lo4Syw0EOzUl0xjs2lsoeqFku44KpngfIduHBYvzm8h2+7K8YMQh1JtVVVrUvhLpNwqVi4DERegUJhPQ==",
+      "license": "Apache-2.0"
+    },
     "node_modules/flatted": {
       "version": "3.4.2",
       "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.4.2.tgz",
@@ -8154,6 +8225,12 @@
       "devOptional": true,
       "license": "ISC"
     },
+    "node_modules/guid-typescript": {
+      "version": "1.0.9",
+      "resolved": "https://registry.npmjs.org/guid-typescript/-/guid-typescript-1.0.9.tgz",
+      "integrity": "sha512-Y8T4vYhEfwJOTbouREvG+3XDsjr8E3kIr7uf+JZ0BYloFsttiHU0WfvANVsR7TxNUJa/WpCnw/Ino/p+DeBhBQ==",
+      "license": "ISC"
+    },
     "node_modules/handlebars": {
       "version": "4.7.9",
       "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.9.tgz",
@@ -9778,6 +9855,12 @@
         "url": "https://tidelift.com/funding/github/npm/loglevel"
       }
     },
+    "node_modules/long": {
+      "version": "5.3.2",
+      "resolved": "https://registry.npmjs.org/long/-/long-5.3.2.tgz",
+      "integrity": "sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==",
+      "license": "Apache-2.0"
+    },
     "node_modules/lowercase-keys": {
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-3.0.0.tgz",
@@ -10712,6 +10795,26 @@
         "onnxruntime-common": "1.24.3"
       }
     },
+    "node_modules/onnxruntime-web": {
+      "version": "1.25.1",
+      "resolved": "https://registry.npmjs.org/onnxruntime-web/-/onnxruntime-web-1.25.1.tgz",
+      "integrity": "sha512-mgs61sJ9m3hLa5jGRr9Pen3kkG00vlxmrcRL6FufYpSWBZKaklo0sotQCq2fLjgDVLnW57jrDcLqzYJNKeZskQ==",
+      "license": "MIT",
+      "dependencies": {
+        "flatbuffers": "^25.1.24",
+        "guid-typescript": "^1.0.9",
+        "long": "^5.2.3",
+        "onnxruntime-common": "1.25.1",
+        "platform": "^1.3.6",
+        "protobufjs": "^7.2.4"
+      }
+    },
+    "node_modules/onnxruntime-web/node_modules/onnxruntime-common": {
+      "version": "1.25.1",
+      "resolved": "https://registry.npmjs.org/onnxruntime-common/-/onnxruntime-common-1.25.1.tgz",
+      "integrity": "sha512-kKvYQFdos4LWJqhZ+nmKu3NT8NXzw8I5x9fNUKe1rNKcPfNKnYXUtW7JBpcKFsvLtrJashRgVYSbFap4cHxvNg==",
+      "license": "MIT"
+    },
     "node_modules/optionator": {
       "version": "0.9.4",
       "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz",
@@ -11129,6 +11232,12 @@
         "node": ">=8"
       }
     },
+    "node_modules/platform": {
+      "version": "1.3.6",
+      "resolved": "https://registry.npmjs.org/platform/-/platform-1.3.6.tgz",
+      "integrity": "sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==",
+      "license": "MIT"
+    },
     "node_modules/pluralize": {
       "version": "8.0.0",
       "resolved": "https://registry.npmjs.org/pluralize/-/pluralize-8.0.0.tgz",
@@ -11286,6 +11395,30 @@
         "node": ">= 6"
       }
     },
+    "node_modules/protobufjs": {
+      "version": "7.5.6",
+      "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.5.6.tgz",
+      "integrity": "sha512-M71sTMB146U3u0di3yup8iM+zv8yPRNQVr1KK4tyBitl3qFvEGucq/rGDRShD2rsJhtN02RJaJ7j5X5hmy8SJg==",
+      "hasInstallScript": true,
+      "license": "BSD-3-Clause",
+      "dependencies": {
+        "@protobufjs/aspromise": "^1.1.2",
+        "@protobufjs/base64": "^1.1.2",
+        "@protobufjs/codegen": "^2.0.5",
+        "@protobufjs/eventemitter": "^1.1.0",
+        "@protobufjs/fetch": "^1.1.0",
+        "@protobufjs/float": "^1.0.2",
+        "@protobufjs/inquire": "^1.1.1",
+        "@protobufjs/path": "^1.1.2",
+        "@protobufjs/pool": "^1.1.0",
+        "@protobufjs/utf8": "^1.1.1",
+        "@types/node": ">=13.7.0",
+        "long": "^5.0.0"
+      },
+      "engines": {
+        "node": ">=12.0.0"
+      }
+    },
     "node_modules/proxy-addr": {
       "version": "2.0.7",
       "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz",

+ 1 - 0
package.json

@@ -31,6 +31,7 @@
     "find-process": "^2.1.1",
     "jimp": "^1.6.1",
     "onnxruntime-node": "^1.24.3",
+    "onnxruntime-web": "^1.25.1",
     "pidusage": "^4.0.1",
     "reflect-metadata": "^0.2.2",
     "rxjs": "^7.8.1",

+ 14 - 3
src/palm-oil/palm-oil.module.ts

@@ -2,15 +2,26 @@ import { Module } from '@nestjs/common';
 import { TypeOrmModule } from '@nestjs/typeorm';
 import { PalmOilController } from './palm-oil.controller';
 import { PalmOilService } from './palm-oil.service';
-import { ScannerProvider } from './providers/scanner.provider';
 import { VisionGateway } from './vision.gateway';
 import { History } from './entities/history.entity';
 import { SurveillanceModule } from '../surveillance/surveillance.module';
+import { SCANNER_TOKEN } from './providers/scanner.interface';
+import { OnnxNativeProvider } from './providers/onnx-native.provider';
+import { OnnxWasmProvider } from './providers/onnx-wasm.provider';
+
+const backend = process.env.INFERENCE_BACKEND ?? 'onnx-native';
+const ScannerImpl = backend === 'onnx-wasm' ? OnnxWasmProvider : OnnxNativeProvider;
+
+console.log(`🔧 Inference backend: ${backend} → ${ScannerImpl.name}`);
 
 @Module({
   imports: [TypeOrmModule.forFeature([History]), SurveillanceModule],
   controllers: [PalmOilController],
-  providers: [PalmOilService, ScannerProvider, VisionGateway],
-  exports: [PalmOilService, ScannerProvider],
+  providers: [
+    PalmOilService,
+    VisionGateway,
+    { provide: SCANNER_TOKEN, useClass: ScannerImpl },
+  ],
+  exports: [PalmOilService],
 })
 export class PalmOilModule {}

+ 3 - 3
src/palm-oil/palm-oil.service.ts

@@ -1,7 +1,7 @@
-import { Injectable } from '@nestjs/common';
+import { Injectable, Inject } from '@nestjs/common';
 import { InjectRepository } from '@nestjs/typeorm';
 import { Repository } from 'typeorm';
-import { ScannerProvider } from './providers/scanner.provider';
+import { SCANNER_TOKEN, IScannerProvider } from './providers/scanner.interface';
 import { Jimp } from 'jimp';
 import { performance } from 'perf_hooks';
 import { AnalysisResponse, IndustrialSummary } from './interfaces/palm-analysis.interface';
@@ -16,7 +16,7 @@ export class PalmOilService {
   private readonly ARCHIVE_DIR = path.join(process.cwd(), 'archive');
 
   constructor(
-    private readonly scanner: ScannerProvider,
+    @Inject(SCANNER_TOKEN) private readonly scanner: IScannerProvider,
     @InjectRepository(History)
     private readonly historyRepository: Repository<History>,
   ) {

+ 106 - 0
src/palm-oil/providers/onnx-native.provider.ts

@@ -0,0 +1,106 @@
+import { Injectable, OnModuleInit } from '@nestjs/common';
+import * as onnx from 'onnxruntime-node';
+import { Jimp } from 'jimp';
+import * as path from 'path';
+import { MPOB_CLASSES, HEALTH_ALERT_CLASSES } from '../constants/mpob-standards';
+import { DetectionResult } from '../interfaces/palm-analysis.interface';
+import { IScannerProvider, InferenceTensor, ScanResult } from './scanner.interface';
+
+@Injectable()
+export class OnnxNativeProvider implements IScannerProvider, OnModuleInit {
+  private session!: onnx.InferenceSession;
+  private readonly modelPath = path.join(process.cwd(), 'best.onnx');
+
+  async onModuleInit() {
+    try {
+      this.session = await onnx.InferenceSession.create(this.modelPath);
+      console.log('✅ [onnx-native] Inference session initialized:', this.modelPath);
+    } catch (error) {
+      console.error('❌ [onnx-native] Failed to initialize:', error);
+      throw error;
+    }
+  }
+
+  async preprocess(imageBuffer: Buffer): Promise<InferenceTensor> {
+    const img = await Jimp.read(imageBuffer);
+    img.resize({ w: 640, h: 640 });
+
+    const pixels = img.bitmap.data;
+    const imageSize = 640 * 640;
+    const floatData = new Float32Array(3 * imageSize);
+
+    for (let i = 0; i < imageSize; i++) {
+      floatData[i] = pixels[i * 4] / 255.0;
+      floatData[i + imageSize] = pixels[i * 4 + 1] / 255.0;
+      floatData[i + 2 * imageSize] = pixels[i * 4 + 2] / 255.0;
+    }
+
+    const tensor = new onnx.Tensor('float32', floatData, [1, 3, 640, 640]);
+    return { data: tensor.data as Float32Array, dims: tensor.dims };
+  }
+
+  async inference(tensor: InferenceTensor): Promise<InferenceTensor> {
+    const onnxTensor = new onnx.Tensor('float32', tensor.data, [1, 3, 640, 640]);
+    const outputs = await this.session.run({ images: onnxTensor });
+    const out = outputs[Object.keys(outputs)[0]];
+    return { data: out.data as Float32Array, dims: out.dims };
+  }
+
+  async postprocess(
+    tensor: InferenceTensor,
+    originalWidth: number,
+    originalHeight: number,
+    threshold = 0.25,
+  ): Promise<ScanResult> {
+    return postprocessShared(tensor, originalWidth, originalHeight, threshold);
+  }
+}
+
+/** Shared postprocess logic — identical between native and WASM providers. */
+export function postprocessShared(
+  outputTensor: InferenceTensor,
+  originalWidth: number,
+  originalHeight: number,
+  threshold: number,
+): ScanResult {
+  const data = outputTensor.data;
+
+  const sampleRows = Math.min(5, outputTensor.dims[1]);
+  const raw_tensor_sample: number[][] = [];
+  for (let i = 0; i < sampleRows; i++) {
+    const offset = i * 6;
+    raw_tensor_sample.push([
+      parseFloat(data[offset].toFixed(6)),
+      parseFloat(data[offset + 1].toFixed(6)),
+      parseFloat(data[offset + 2].toFixed(6)),
+      parseFloat(data[offset + 3].toFixed(6)),
+      parseFloat(data[offset + 4].toFixed(6)),
+      parseFloat(data[offset + 5].toFixed(6)),
+    ]);
+  }
+
+  const results: DetectionResult[] = [];
+  const numCandidates = outputTensor.dims[1];
+
+  for (let i = 0; i < numCandidates; i++) {
+    const offset = i * 6;
+    const confidence = data[offset + 4];
+    if (confidence < threshold) continue;
+
+    const className = MPOB_CLASSES[Math.round(data[offset + 5])] || 'Unknown';
+    results.push({
+      bunch_id: results.length + 1,
+      class: className,
+      confidence: parseFloat(confidence.toFixed(4)),
+      is_health_alert: HEALTH_ALERT_CLASSES.includes(className),
+      box: [
+        data[offset] * originalWidth,
+        data[offset + 1] * originalHeight,
+        data[offset + 2] * originalWidth,
+        data[offset + 3] * originalHeight,
+      ],
+    });
+  }
+
+  return { detections: results, raw_tensor_sample };
+}

+ 60 - 0
src/palm-oil/providers/onnx-wasm.provider.ts

@@ -0,0 +1,60 @@
import { Injectable, OnModuleInit } from '@nestjs/common';
import * as fs from 'fs';
import * as path from 'path';
import * as ort from 'onnxruntime-web';
import { Jimp } from 'jimp';
import { IScannerProvider, InferenceTensor, ScanResult } from './scanner.interface';
import { postprocessShared } from './onnx-native.provider';
+
+// Single-threaded WASM — safer on low-resource / ARM environments (Android/Termux)
+ort.env.wasm.numThreads = 1;
+
+@Injectable()
+export class OnnxWasmProvider implements IScannerProvider, OnModuleInit {
+  private session!: ort.InferenceSession;
+  private readonly modelPath = path.join(process.cwd(), 'best.onnx');
+
+  async onModuleInit() {
+    try {
+      this.session = await ort.InferenceSession.create(this.modelPath, {
+        executionProviders: ['wasm'],
+      });
+      console.log('✅ [onnx-wasm] Inference session initialized:', this.modelPath);
+    } catch (error) {
+      console.error('❌ [onnx-wasm] Failed to initialize:', error);
+      throw error;
+    }
+  }
+
+  async preprocess(imageBuffer: Buffer): Promise<InferenceTensor> {
+    const img = await Jimp.read(imageBuffer);
+    img.resize({ w: 640, h: 640 });
+
+    const pixels = img.bitmap.data;
+    const imageSize = 640 * 640;
+    const floatData = new Float32Array(3 * imageSize);
+
+    for (let i = 0; i < imageSize; i++) {
+      floatData[i] = pixels[i * 4] / 255.0;
+      floatData[i + imageSize] = pixels[i * 4 + 1] / 255.0;
+      floatData[i + 2 * imageSize] = pixels[i * 4 + 2] / 255.0;
+    }
+
+    return { data: floatData, dims: [1, 3, 640, 640] };
+  }
+
+  async inference(tensor: InferenceTensor): Promise<InferenceTensor> {
+    const ortTensor = new ort.Tensor('float32', tensor.data, [1, 3, 640, 640]);
+    const outputs = await this.session.run({ images: ortTensor });
+    const out = outputs[Object.keys(outputs)[0]];
+    return { data: out.data as Float32Array, dims: out.dims };
+  }
+
+  async postprocess(
+    tensor: InferenceTensor,
+    originalWidth: number,
+    originalHeight: number,
+    threshold = 0.25,
+  ): Promise<ScanResult> {
+    return postprocessShared(tensor, originalWidth, originalHeight, threshold);
+  }
+}

+ 25 - 0
src/palm-oil/providers/scanner.interface.ts

@@ -0,0 +1,25 @@
import { DetectionResult } from '../interfaces/palm-analysis.interface';

/**
 * DI token under which the active scanner implementation (native or WASM)
 * is registered; inject with `@Inject(SCANNER_TOKEN)`.
 */
export const SCANNER_TOKEN = 'IScannerProvider';

/** Final result of a scan: decoded detections plus raw model rows for debugging. */
export interface ScanResult {
  detections: DetectionResult[];
  // First few unfiltered output rows, kept for diagnostics/telemetry.
  raw_tensor_sample: number[][];
}

/** Minimal tensor shape used to thread tensors between provider stages. */
export interface InferenceTensor {
  data: Float32Array;
  dims: readonly number[];
}

/**
 * Contract for an inference backend: preprocess an image buffer into a
 * tensor, run the model on it, and decode detections scaled back to the
 * original image dimensions.
 */
export interface IScannerProvider {
  preprocess(imageBuffer: Buffer): Promise<InferenceTensor>;
  inference(tensor: InferenceTensor): Promise<InferenceTensor>;
  postprocess(
    tensor: InferenceTensor,
    originalWidth: number,
    originalHeight: number,
    threshold?: number,  // confidence cutoff; implementations default it (0.25)
  ): Promise<ScanResult>;
}

+ 0 - 124
src/palm-oil/providers/scanner.provider.ts

@@ -1,124 +0,0 @@
-import { Injectable, OnModuleInit } from '@nestjs/common';
-import * as onnx from 'onnxruntime-node';
-import { Jimp } from 'jimp';
-import * as path from 'path';
-import { MPOB_CLASSES, HEALTH_ALERT_CLASSES } from '../constants/mpob-standards';
-import { DetectionResult } from '../interfaces/palm-analysis.interface';
-
-export interface ScanResult {
-  detections: DetectionResult[];
-  raw_tensor_sample: number[][];
-}
-
-@Injectable()
-export class ScannerProvider implements OnModuleInit {
-  private session!: onnx.InferenceSession;
-  private readonly modelPath = path.join(process.cwd(), 'best.onnx');
-
-  async onModuleInit() {
-    try {
-      this.session = await onnx.InferenceSession.create(this.modelPath);
-      console.log('✅ ONNX Inference Session initialized from:', this.modelPath);
-    } catch (error) {
-      console.error('❌ Failed to initialize ONNX Inference Session:', error);
-      throw error;
-    }
-  }
-
-  /**
-   * Preprocesses the image buffer: resize to 640x640, transpose HWC to CHW, and normalize.
-   */
-  async preprocess(imageBuffer: Buffer): Promise<onnx.Tensor> {
-    const img = await Jimp.read(imageBuffer);
-    img.resize({ w: 640, h: 640 });
-
-    const pixels = img.bitmap.data; // RGBA: [R, G, B, A, R, G, B, A, ...]
-    const width = img.width;
-    const height = img.height;
-    const imageSize = width * height;
-    const floatData = new Float32Array(3 * imageSize);
-
-    // HWC (RGBA) to CHW (RGB) transposition — stride 4, skip alpha
-    // floatData: [R1, R2, ..., G1, G2, ..., B1, B2, ...]
-    for (let i = 0; i < imageSize; i++) {
-      floatData[i] = pixels[i * 4] / 255.0;               // R
-      floatData[i + imageSize] = pixels[i * 4 + 1] / 255.0; // G
-      floatData[i + 2 * imageSize] = pixels[i * 4 + 2] / 255.0; // B
-    }
-
-    return new onnx.Tensor('float32', floatData, [1, 3, 640, 640]);
-  }
-
-  /**
-   * Executes the ONNX session with the preprocessed tensor.
-   */
-  async inference(tensor: onnx.Tensor): Promise<onnx.Tensor> {
-    const inputs = { images: tensor };
-    const outputs = await this.session.run(inputs);
-    
-    // The model typically returns the output under a generic name like 'output0' or 'outputs'
-    // We'll take the first output key available
-    const outputKey = Object.keys(outputs)[0];
-    return outputs[outputKey];
-  }
-
-  /**
-   * Post-processes the model output: filtering, scaling, and mapping to MPOB standards.
-   */
-  async postprocess(
-    outputTensor: onnx.Tensor,
-    originalWidth: number,
-    originalHeight: number,
-    threshold: number = 0.25,
-  ): Promise<ScanResult> {
-    const data = outputTensor.data as Float32Array;
-    // Expected shape: [1, 300, 6]
-    // Each candidate: [x1, y1, x2, y2, confidence, class_index]
-
-    // Capture first 5 raw rows before NMS filtering — the AI's unfiltered "thought process"
-    const sampleRows = Math.min(5, outputTensor.dims[1]);
-    const raw_tensor_sample: number[][] = [];
-    for (let i = 0; i < sampleRows; i++) {
-      const offset = i * 6;
-      raw_tensor_sample.push([
-        parseFloat(data[offset].toFixed(6)),
-        parseFloat(data[offset + 1].toFixed(6)),
-        parseFloat(data[offset + 2].toFixed(6)),
-        parseFloat(data[offset + 3].toFixed(6)),
-        parseFloat(data[offset + 4].toFixed(6)),
-        parseFloat(data[offset + 5].toFixed(6)),
-      ]);
-    }
-
-    const results: DetectionResult[] = [];
-    const numCandidates = outputTensor.dims[1];
-
-    for (let i = 0; i < numCandidates; i++) {
-      const offset = i * 6;
-      const x1 = data[offset];
-      const y1 = data[offset + 1];
-      const x2 = data[offset + 2];
-      const y2 = data[offset + 3];
-      const confidence = data[offset + 4];
-      const classIndex = data[offset + 5];
-
-      if (confidence >= threshold) {
-        const className = MPOB_CLASSES[Math.round(classIndex)] || 'Unknown';
-        results.push({
-          bunch_id: results.length + 1,
-          class: className,
-          confidence: parseFloat(confidence.toFixed(4)),
-          is_health_alert: HEALTH_ALERT_CLASSES.includes(className),
-          box: [
-            data[offset] * originalWidth,
-            data[offset + 1] * originalHeight,
-            data[offset + 2] * originalWidth,
-            data[offset + 3] * originalHeight,
-          ],
-        });
-      }
-    }
-
-    return { detections: results, raw_tensor_sample };
-  }
-}