@@ -294,9 +294,23 @@ export class AnalyzerComponent implements OnInit, OnDestroy {
     reader.onload = (e) => {
       const base64 = e.target?.result as string;
       if (!base64) return;
-      this.snappedFrame = base64;
-      this.visionSocket.sendBase64(base64);
-      this.waitForSocketResult();
+
+      // Rescale the gallery image to 640×640 before sending and storing as
+      // snappedFrame. The backend always runs inference in 640×640 space, so
+      // the canvas background must match that same square crop to keep bounding
+      // boxes aligned with the displayed image.
+      const img = new Image();
+      img.onload = () => {
+        const offscreen = document.createElement('canvas');
+        offscreen.width = 640;
+        offscreen.height = 640;
+        offscreen.getContext('2d')!.drawImage(img, 0, 0, 640, 640);
+        const scaled640 = offscreen.toDataURL('image/jpeg');
+        this.snappedFrame = scaled640;
+        this.visionSocket.sendBase64(scaled640);
+        this.waitForSocketResult();
+      };
+      img.src = base64;
     };
     reader.readAsDataURL(this.socketGalleryFile);
   }
@@ -346,19 +360,25 @@ export class AnalyzerComponent implements OnInit, OnDestroy {
     const img = new Image();
     img.src = this.snappedFrame;
     img.onload = () => {
+      // The canvas display width matches the container.
+      // The canvas logical size is always set to 640×640 because the backend
+      // always runs inference in 640×640 space — coords are always 640-relative
+      // regardless of whether the source was a webcam snap (already 640×640) or
+      // a gallery image (arbitrary size sent as-is; backend rescales internally).
       const containerWidth = canvas.parentElement!.clientWidth || 640;
-      const scale = containerWidth / img.width;
       canvas.width = containerWidth;
-      canvas.height = img.height * scale;
+      canvas.height = containerWidth; // square: 640px inference space
       const ctx = canvas.getContext('2d');
       if (!ctx) return;
+      // Draw the source image stretched to fill the square canvas
       ctx.drawImage(img, 0, 0, canvas.width, canvas.height);
+
+      // Map 640-space coords → canvas pixels via percentage
+      const scaleX = canvas.width / 640;
+      const scaleY = canvas.height / 640;
+
       detections.forEach((det: any) => {
-        // Backend returns absolute coords in 640×640 space
-        const scaleX = canvas.width / 640;
-        const scaleY = canvas.height / 640;
         const [x1, y1, x2, y2] = det.box;
         const color = GRADE_COLORS[det.class] || '#00A651';