Przeglądaj źródła

enhancement for UI to include API-mode inference with NestJS

Dr-Swopt 1 tydzień temu
rodzic
commit
81e0c0a487

+ 9 - 1
frontend/src/app/components/analyzer/analyzer.component.html

@@ -16,8 +16,16 @@
 
       <div class="controls glass-panel">
         <div class="field">
+          <label>Processing Mode</label>
+          <select [ngModel]="inferenceService.mode()" (ngModelChange)="inferenceService.mode.set($event)" class="input">
+            <option value="local">Edge AI (Low Latency)</option>
+            <option value="api">API AI (High Fidelity)</option>
+          </select>
+        </div>
+
+        <div class="field" *ngIf="inferenceService.mode() === 'local'">
           <label>Local Engine Model</label>
-          <select [(ngModel)]="modelType" class="input">
+          <select [ngModel]="inferenceService.localEngine()" (ngModelChange)="inferenceService.localEngine.set($event)" class="input">
             <option value="onnx">YOLOv8 Industrial (ONNX)</option>
             <option value="tflite">Standard PoC (TFLite FP32)</option>
           </select>

+ 44 - 61
frontend/src/app/components/analyzer/analyzer.component.ts

@@ -3,6 +3,7 @@ import { CommonModule } from '@angular/common';
 import { ImageProcessorService } from '../../services/image-processor.service';
 import { LocalInferenceService } from '../../services/local-inference.service';
 import { LocalHistoryService } from '../../services/local-history.service';
+import { InferenceService } from '../../core/services/inference.service';
 import { FormsModule } from '@angular/forms';
 
 @Component({
@@ -24,7 +25,7 @@ export class AnalyzerComponent implements OnInit {
 
   constructor(
     private imageProcessor: ImageProcessorService,
-    private localInference: LocalInferenceService,
+    public inferenceService: InferenceService,
     private localHistory: LocalHistoryService
   ) {}
 
@@ -76,75 +77,57 @@ export class AnalyzerComponent implements OnInit {
   }
 
   async analyze(): Promise<void> {
-    if (!this.selectedFile) return;
+    if (!this.selectedFile || !this.previewUrl) return;
 
     this.loading = true;
     const start = performance.now();
 
     try {
-      // 1. Path Mapping: ONNX for industry, TFLite (Float32) for standard PoC
-      const modelPath = this.modelType === 'onnx' 
-        ? 'assets/models/onnx/best.onnx' 
-        : 'assets/models/tflite/best_float32.tflite';
-      
-      await this.localInference.loadModel(modelPath);
-
-      // Get original image dimensions to pass to the parser
+      // Get image dimensions
       const img = await this.loadImageDimensions(this.selectedFile);
 
-      // 2. Preprocess the image to Float32Array [1, 3, 640, 640]
-      const imageData = await this.imageProcessor.processImage(this.selectedFile);
-
-      // 3. Run Inference locally
-      const rawData = await this.localInference.runInference(imageData);
-
-      if (!rawData) {
-        throw new Error("Inference failed to produce data.");
-      }
-
-      // 4. Decode Tensor -> Detections
-      const detections = this.localInference.parseDetections(
-        rawData,
-        this.confidence,
-        img.width,
-        img.height
-      );
-
-      // 5. Calculate Industrial Summary - Updated to match new categories
-      const summary: any = { 
-        'Empty_Bunch': 0, 
-        'Underripe': 0, 
-        'Abnormal': 0, 
-        'Ripe': 0, 
-        'Unripe': 0, 
-        'Overripe': 0 
-      };
-      
-      detections.forEach((d: any) => {
-        if (summary[d.class] !== undefined) summary[d.class]++;
+      // Use the Master Inference Service Hub
+      this.inferenceService.analyze(this.previewUrl, img.width, img.height).subscribe({
+        next: (detections) => {
+          // Calculate Industrial Summary
+          const summary: any = { 
+            'Empty_Bunch': 0, 
+            'Underripe': 0, 
+            'Abnormal': 0, 
+            'Ripe': 0, 
+            'Unripe': 0, 
+            'Overripe': 0 
+          };
+          
+          detections.forEach((d: any) => {
+            if (summary[d.class] !== undefined) summary[d.class]++;
+          });
+          
+          this.results = {
+            industrial_summary: summary,
+            inference_ms: performance.now() - start,
+            detections: detections
+          };
+
+          // Persist to local vault
+          this.localHistory.saveRecord(
+            this.results, 
+            this.selectedFile!.name, 
+            this.inferenceService.mode(),
+            this.previewUrl!,
+            img
+          );
+
+          this.loading = false;
+          setTimeout(() => this.drawDetections(), 100);
+        },
+        error: (err) => {
+          console.error('Analysis Failed:', err);
+          this.loading = false;
+        }
       });
-      
-      this.results = {
-        industrial_summary: summary,
-        inference_ms: performance.now() - start,
-        detections: detections
-      };
-
-      // PERSIST TO LOCAL VAULT
-      this.localHistory.saveRecord(
-        this.results, 
-        this.selectedFile!.name, 
-        this.modelType,
-        this.previewUrl!,
-        img
-      );
-
-      console.log('Backend-less PoC: Parsed Detections:', detections);
-      
-      this.loading = false;
-      setTimeout(() => this.drawDetections(), 100);
     } catch (err) {
-      console.error('Local Analysis Failed:', err);
+      console.error('Processing Pipeline Error:', err);
       this.loading = false;
     }
   }

+ 30 - 0
frontend/src/app/core/interfaces/palm-analysis.interface.ts

@@ -0,0 +1,30 @@
/**
 * Axis-aligned bounding box in absolute pixel coordinates.
 * NOTE(review): not referenced by the other interfaces in this file
 * (DetectionResult carries a tuple instead) — confirm it is consumed
 * elsewhere before relying on or removing it.
 */
export interface BoundingBox {
  x1: number;
  y1: number;
  x2: number;
  y2: number;
}

/** One detected bunch, in the shape the UI and drawing code consume. */
export interface DetectionResult {
  bunch_id: number;
  class: string;
  confidence: number;
  is_health_alert: boolean;
  box: [number, number, number, number]; // [x1, y1, x2, y2]
  norm_box?: [number, number, number, number]; // Normalized
}

/** Count of detections keyed by grade/class name. */
export interface IndustrialSummary {
  [className: string]: number;
}

/** Response contract of the NestJS /palm-oil/analyze endpoint. */
export interface AnalysisResponse {
  status: string;
  current_threshold: number;
  total_count: number;
  industrial_summary: IndustrialSummary;
  detections: DetectionResult[];
  inference_ms: number;
  processing_ms: number;
  archive_id: string;
}

+ 98 - 0
frontend/src/app/core/services/inference.service.ts

@@ -0,0 +1,98 @@
+import { Injectable, signal } from '@angular/core';
+import { Observable, from, map, catchError, of, switchMap } from 'rxjs';
+import { LocalInferenceService } from '../../services/local-inference.service';
+import { RemoteInferenceService } from './remote-inference.service';
+import { ImageProcessorService } from '../../services/image-processor.service';
+import { AnalysisResponse, DetectionResult } from '../interfaces/palm-analysis.interface';
+
+export type InferenceMode = 'local' | 'api';
+export type LocalEngine = 'onnx' | 'tflite';
+
+@Injectable({
+  providedIn: 'root'
+})
+export class InferenceService {
+  // Use Signal to track processing mode and local engine
+  public mode = signal<InferenceMode>('local');
+  public localEngine = signal<LocalEngine>('onnx');
+
+  constructor(
+    private readonly localInferenceService: LocalInferenceService,
+    private readonly remoteInferenceService: RemoteInferenceService,
+    private readonly imageProcessor: ImageProcessorService
+  ) {}
+
+  /**
+   * Main analyze entry point. imageData is base64 string.
+   */
+  analyze(imageData: string, width: number, height: number): Observable<any[]> {
+    if (this.mode() === 'local') {
+      return this.runLocalAnalysis(imageData, width, height);
+    } else {
+      return this.runRemoteAnalysis(imageData);
+    }
+  }
+
+  private runLocalAnalysis(imageData: string, width: number, height: number): Observable<any[]> {
+    const blob = this.base64ToBlob(imageData);
+    const file = new File([blob], 'capture.jpg', { type: 'image/jpeg' });
+
+    const modelPath = this.localEngine() === 'onnx' 
+      ? 'assets/models/onnx/best.onnx' 
+      : 'assets/models/tflite/best_float32.tflite';
+
+    return from(this.localInferenceService.loadModel(modelPath)).pipe(
+      switchMap(() => from(this.imageProcessor.processImage(file))),
+      switchMap(processedData => from(this.localInferenceService.runInference(processedData))),
+      map(rawData => {
+        if (!rawData) return [];
+        return this.localInferenceService.parseDetections(rawData, 0.25, width, height);
+      }),
+      catchError(err => {
+        console.error('Local Inference Hub Error:', err);
+        return of([]);
+      })
+    );
+  }
+
+  private runRemoteAnalysis(imageData: string): Observable<any[]> {
+    const blob = this.base64ToBlob(imageData);
+    return this.remoteInferenceService.analyze(blob).pipe(
+      map((response: AnalysisResponse) => {
+        // Map Result to the internal UI format
+        // Coordinate Sync: Use absolute pixels directly from API
+        return response.detections.map(det => ({
+          box: det.box, // Already [x1, y1, x2, y2]
+          confidence: det.confidence,
+          class: det.class,
+          color: this.getGradeColor(det.class),
+          isHealthAlert: det.is_health_alert
+        }));
+      }),
+      catchError(err => {
+        console.error('Remote Inference Hub Error:', err);
+        return of([]);
+      })
+    );
+  }
+
+  private base64ToBlob(base64: string): Blob {
+    const parts = base64.split(',');
+    const byteString = atob(parts[1]);
+    const mimeString = parts[0].split(':')[1].split(';')[0];
+    const ab = new ArrayBuffer(byteString.length);
+    const ia = new Uint8Array(ab);
+    for (let i = 0; i < byteString.length; i++) {
+        ia[i] = byteString.charCodeAt(i);
+    }
+    return new Blob([ab], { type: mimeString });
+  }
+
+  private getGradeColor(className: string): string {
+    const colors: { [key: string]: string } = {
+        'Empty_Bunch': '#6C757D', 'Underripe': '#F9A825', 'Abnormal': '#DC3545',
+        'Ripe': '#00A651', 'Unripe': '#9E9D24', 'Overripe': '#5D4037'
+    };
+    return colors[className] || '#000000';
+  }
+}

+ 27 - 0
frontend/src/app/core/services/remote-inference.service.ts

@@ -0,0 +1,27 @@
+import { Injectable } from '@angular/core';
+import { HttpClient } from '@angular/common/http';
+import { Observable, catchError, throwError } from 'rxjs';
+import { environment } from '../../../environments/environment';
+import { AnalysisResponse } from '../interfaces/palm-analysis.interface';
+
+@Injectable({
+  providedIn: 'root'
+})
+export class RemoteInferenceService {
+  private readonly apiUrl = `${environment.apiUrl}/palm-oil/analyze`;
+
+  constructor(private http: HttpClient) {}
+
+  analyze(imageBlob: Blob): Observable<AnalysisResponse> {
+    const formData = new FormData();
+    // 'image' must match the @FileInterceptor('image') in NestJS
+    formData.append('image', imageBlob, 'capture.jpg');
+
+    return this.http.post<AnalysisResponse>(this.apiUrl, formData).pipe(
+      catchError((error) => {
+        console.error('Remote Inference Error:', error);
+        return throwError(() => new Error('Remote API unreachable. Please check your connection or switch to Edge Mode.'));
+      })
+    );
+  }
+}

+ 4 - 0
frontend/src/environments/environment.ts

@@ -0,0 +1,4 @@
// Development environment configuration — presumably swapped for a production
// variant via Angular fileReplacements; verify against angular.json.
export const environment = {
  production: false,
  // Base URL of the local NestJS inference API (default NestJS dev port).
  apiUrl: 'http://localhost:3000'
};