@@ -15,12 +15,16 @@ import { MatOptionModule } from '@angular/material/core';
 
 interface TrackedFace {
   box: faceapi.Box;
-  lastRecognized?: number; // timestamp of last detection (starts cooldown)
   recognizedName?: string;
   recognitionConfidence?: number;
   imageBase64?: string;
-}
 
+  isRecognizing?: boolean;
+  lastScanTime?: number;
+
+  currentColor?: string; // always store current border color
+  resultColor?: string; // temporarily holds green/red after scan
+}
 interface RecognizedProfile {
   name: string;
   confidence: number;
@@ -44,7 +48,7 @@ interface RecognizedProfile {
   templateUrl: './webcam.component.html',
   styleUrls: ['./webcam.component.css']
 })
-export class WebcamComponent implements AfterViewInit, OnDestroy {
+export class WebcamComponent implements AfterViewInit {
   @ViewChild('video') videoRef!: ElementRef<HTMLVideoElement>;
   @ViewChild('canvas') canvasRef!: ElementRef<HTMLCanvasElement>;
 
@@ -53,6 +57,9 @@ export class WebcamComponent implements AfterViewInit, OnDestroy {
   private trackedFaces: TrackedFace[] = [];
   private recognitionCooldown = 5000; // 5 seconds cooldown per face
 
+  private activeResultTimeout: any;
+  public activeResult: RecognizedProfile | null = null;
+
   recognizedProfiles: RecognizedProfile[] = [];
   public selectedModel: 'VGG-Face' | 'Facenet' | 'OpenFace' = 'VGG-Face';
 
@@ -66,11 +73,24 @@ export class WebcamComponent implements AfterViewInit, OnDestroy {
     await this.setupCamera();
     await this.loadFaceModels();
 
-    this.detectionInterval = setInterval(() => this.detectFaces(), 200); // ~5 FPS
-  }
+    const video = this.videoRef.nativeElement;
+    const canvas = this.canvasRef.nativeElement;
+
+    // Wait until video has actual dimensions
+    await new Promise<void>(resolve => {
+      if (video.videoWidth && video.videoHeight) {
+        resolve();
+      } else {
+        video.onloadedmetadata = () => resolve();
+      }
+    });
+
+    // Set canvas size **once**
+    canvas.width = video.videoWidth;
+    canvas.height = video.videoHeight;
 
-  ngOnDestroy() {
-    if (this.detectionInterval) clearInterval(this.detectionInterval);
+    // Start detection loop using setInterval
+    this.detectionInterval = setInterval(() => this.detectFaces(), 200); // ~5 FPS
   }
 
   private async setupCamera() {
@@ -88,55 +108,60 @@ export class WebcamComponent implements AfterViewInit, OnDestroy {
     console.log('[INFO] Face-api.js models loaded');
   }
 
+  private getFaceColor(face: TrackedFace) {
+    // Use resultColor if set; otherwise currentColor
+    return face.resultColor || face.currentColor || 'white';
+  }
+
+  private startDetectionLoop() {
+    const loop = async () => {
+      await this.detectFaces();
+      requestAnimationFrame(loop);
+    };
+    loop();
+  }
+
   private async detectFaces() {
     const video = this.videoRef.nativeElement;
     const canvas = this.canvasRef.nativeElement;
-    const displaySize = { width: video.videoWidth, height: video.videoHeight };
-    canvas.width = displaySize.width;
-    canvas.height = displaySize.height;
-
-    // Detect faces
-    const detections = await faceapi.detectAllFaces(video, new faceapi.TinyFaceDetectorOptions());
     const ctx = canvas.getContext('2d');
     if (!ctx) return;
+
+    // Clear canvas each frame
     ctx.clearRect(0, 0, canvas.width, canvas.height);
 
-    // Clear recognized profiles if a new detection appears
-    if (detections.length && detections.length !== this.trackedFaces.length) {
-      this.recognizedProfiles = [];
-    }
+    // Detect faces
+    const detections = await faceapi.detectAllFaces(video, new faceapi.TinyFaceDetectorOptions());
+    const now = Date.now();
 
-    // Track faces
+    // Track and update faces
     detections.forEach(det => {
       const box = det.box;
-      const tracked = this.trackedFaces.find(f => this.isSameFace(f.box, box));
-      if (!tracked) {
-        this.trackedFaces.push({ box });
-      }
-    });
+      let tracked = this.trackedFaces.find(f => this.isSameFace(f.box, box));
 
-    // Draw bounding boxes and trigger recognition
-    const now = Date.now();
-    for (let face of this.trackedFaces) {
-      let borderColor = 'white';
-
-      if (face.recognizedName) {
-        borderColor = face.recognizedName === 'Unknown' ? 'red' : 'green';
+      if (!tracked) {
+        tracked = { box, currentColor: 'white' };
+        this.trackedFaces.push(tracked);
+      } else {
+        tracked.box = box; // update position
       }
 
-      const cooldown = 2000;
-      if (!face.lastRecognized || now - face.lastRecognized > cooldown) {
-        face.lastRecognized = now;
-        this.recognizeFace(face);
+      const elapsed = now - (tracked.lastScanTime || 0);
+      if (!tracked.isRecognizing && elapsed >= 4000) {
+        this.recognizeFace(tracked);
       }
+    });
 
-      this.drawBox(ctx, face.box, borderColor);
-    }
-
-    // Remove faces not detected anymore
+    // Remove faces no longer detected
     this.trackedFaces = this.trackedFaces.filter(face =>
       detections.some(det => this.isSameFace(det.box, face.box))
     );
+
+    // Draw tracked faces
+    this.trackedFaces.forEach(face => {
+      const color = face.resultColor || face.currentColor || 'white';
+      this.drawBox(ctx, face.box, color);
+    });
   }
 
   private drawBox(ctx: CanvasRenderingContext2D, box: faceapi.Box, color: string, lineWidth: number = 2) {
@@ -154,6 +179,17 @@ export class WebcamComponent implements AfterViewInit, OnDestroy {
   }
 
   private recognizeFace(face: TrackedFace) {
+    // Prevent overlapping scans
+    if (face.isRecognizing) return;
+
+    // Cancel active result timeout if needed
+    if (this.activeResultTimeout) {
+      clearTimeout(this.activeResultTimeout);
+      this.activeResultTimeout = null;
+    }
+
+    face.isRecognizing = true; // scanning in progress
+
     const video = this.videoRef.nativeElement;
     const tempCanvas = document.createElement('canvas');
     tempCanvas.width = face.box.width;
@@ -167,34 +203,55 @@ export class WebcamComponent implements AfterViewInit, OnDestroy {
       0, 0, face.box.width, face.box.height
     );
 
-    // Store the captured face image
-    face.imageBase64 = tempCanvas.toDataURL('image/jpeg');
-
-    const base64Image = face.imageBase64.split(',')[1];
+    const base64Image = tempCanvas.toDataURL('image/jpeg').split(',')[1];
 
     this.faceService.scanFace(base64Image, this.selectedModel).subscribe({
-      next: (res: FaceScanResult) => {
-        const color = res.name === 'Unknown' ? '#fde0e0' : '#e0f7e9';
-
-        // Add profile if not already present
-        if (!this.recognizedProfiles.find(p => p.name === res.name)) {
-          this.recognizedProfiles.push({
-            name: res.name,
-            confidence: res.confidence,
-            color,
-            photoUrl: res.photoUrl, // use server image
-            modelName: this.selectedModel
-          });
-        }
-
+      next: res => {
+        face.isRecognizing = false;
+        face.lastScanTime = Date.now(); // **cooldown starts after success**
         face.recognizedName = res.name || 'Unknown';
         face.recognitionConfidence = res.confidence || 0;
-        this.scanStatus = res.name && res.name !== 'Unknown' ? 'success' : 'fail';
+
+        face.resultColor = res.name === 'Unknown' ? 'red' : 'green';
+
+        // Set as active result
+        this.activeResult = {
+          name: res.name || 'Unknown',
+          confidence: res.confidence || 0,
+          color: face.resultColor === 'red' ? '#fde0e0' : '#e0f7e9',
+          photoUrl: res.photoUrl,
+          modelName: this.selectedModel
+        };
+
+        this.activeResultTimeout = setTimeout(() => {
+          this.activeResult = null;
+          face.resultColor = undefined;
+          this.activeResultTimeout = null;
+        }, 5000);
+
+        // Keep resultColor on canvas for 2s
+        setTimeout(() => face.resultColor = undefined, 2000);
       },
-      error: (err) => {
-        console.error('Recognition failed:', err);
+      error: () => {
+        face.isRecognizing = false;
+        face.lastScanTime = Date.now(); // cooldown starts after failure
         face.recognizedName = 'Unknown';
-        this.scanStatus = 'fail';
+        face.resultColor = 'red';
+
+        this.activeResult = {
+          name: 'Unknown',
+          confidence: 0,
+          color: '#fde0e0',
+          modelName: this.selectedModel
+        };
+
+        this.activeResultTimeout = setTimeout(() => {
+          this.activeResult = null;
+          face.resultColor = undefined;
+          this.activeResultTimeout = null;
+        }, 5000);
+
+        setTimeout(() => face.resultColor = undefined, 2000);
       }
     });
   }