import 'dart:io';
import 'dart:ui';
import 'dart:async';

import 'package:flutter/material.dart';
import 'package:camera/camera.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:path_provider/path_provider.dart';
import 'package:path/path.dart' as p;

import '../services/tflite_service.dart';
import '../services/database_helper.dart';
import '../models/palm_record.dart';

/// Lifecycle of the auto-capture pipeline:
/// searching -> locking -> capturing -> cooldown -> searching.
enum DetectionState { searching, locking, capturing, cooldown }

/// Live camera screen that detects palm bunches in the preview stream,
/// locks onto a stable target, auto-captures a high-resolution photo,
/// archives the result, and then cools down before resuming the scan.
class LiveAnalysisScreen extends StatefulWidget {
  const LiveAnalysisScreen({super.key});

  @override
  State<LiveAnalysisScreen> createState() => _LiveAnalysisScreenState();
}

class _LiveAnalysisScreenState extends State<LiveAnalysisScreen> {
  CameraController? _controller;
  final TfliteService _tfliteService = TfliteService();
  final DatabaseHelper _dbHelper = DatabaseHelper();

  bool _isInitialized = false;
  bool _isProcessing = false; // Guards against overlapping inference runs.
  int _frameCount = 0;
  List<DetectionResult>? _detections;

  // --- Detection lock logic ---
  DetectionState _state = DetectionState.searching;
  static const double _lockThreshold = 0.60;
  static const int _frameThrottle = 2; // Run inference on every Nth frame.
  static const int _historyLength = 20; // Sliding-window size, in frames.
  static const int _requiredHits = 4; // Hits in window needed for momentum.
  static const int _lockTicksTarget = 3; // Momentum ticks needed to capture.

  final List<bool> _detectionHistory =
      List<bool>.filled(_historyLength, false, growable: true);
  int _currentHits = 0; // Recomputed each processed frame; read by lock timer.
  Timer? _lockTimer;
  Timer? _cooldownTimer;
  double _lockProgress = 0.0;
  bool _showFlash = false;

  @override
  void initState() {
    super.initState();
    _initializeCamera();
  }

  /// Requests camera permission, configures the camera at low resolution
  /// (for stream-inference performance), loads the TFLite model, and starts
  /// the preview image stream.
  Future<void> _initializeCamera() async {
    final status = await Permission.camera.request();
    // Bail unless granted: covers denied, permanently denied, and restricted
    // (the original only checked isDenied and fell through on the others).
    if (!status.isGranted) return;

    final cameras = await availableCameras();
    if (cameras.isEmpty) return;

    _controller = CameraController(
      cameras[0],
      ResolutionPreset.low, // Downgraded resolution for performance.
      enableAudio: false,
      imageFormatGroup: Platform.isAndroid
          ? ImageFormatGroup.yuv420
          : ImageFormatGroup.bgra8888,
    );

    try {
      await _controller!.initialize();
      await _tfliteService.initModel();
      await _controller!.startImageStream(_handleImageStream);
      if (mounted) {
        setState(() => _isInitialized = true);
      }
    } catch (e) {
      debugPrint("Camera init error: $e");
    }
  }

  /// Stream callback: throttles frames and skips work while a frame is
  /// already being processed or while capture/cooldown is in progress.
  void _handleImageStream(CameraImage image) {
    if (_isProcessing ||
        _state == DetectionState.capturing ||
        _state == DetectionState.cooldown) {
      return;
    }
    _frameCount++;
    if (_frameCount % _frameThrottle != 0) return;
    _processStreamFrame(image);
  }

  /// Runs inference on one preview frame and updates the sliding detection
  /// window. Transitions searching -> locking once enough of the recent
  /// frames contain a detection above [_lockThreshold].
  Future<void> _processStreamFrame(CameraImage image) async {
    // Plain flag, not state: build() never reads it, so no setState needed
    // (avoids a rebuild on every processed frame).
    _isProcessing = true;
    try {
      final detections = await _tfliteService.runInferenceOnStream(image);
      // any() is false on an empty list, so no isNotEmpty guard is needed.
      final currentFrameHasFruit =
          detections.any((d) => d.confidence > _lockThreshold);

      // Slide the fixed-size window forward by one frame.
      _detectionHistory.removeAt(0);
      _detectionHistory.add(currentFrameHasFruit);
      _currentHits = _detectionHistory.where((h) => h).length;

      if (!mounted) return;
      setState(() => _detections = detections);

      if (_state == DetectionState.searching &&
          _currentHits >= _requiredHits) {
        setState(() {
          _state = DetectionState.locking;
          _lockProgress = 0.0;
        });
        _startLockTimer();
      }
      // No strict cancel here: _startLockTimer drains momentum gracefully.
    } catch (e) {
      debugPrint("Stream processing error: $e");
    } finally {
      // Always release the flag (the original left it stuck true when
      // unmounted mid-frame).
      _isProcessing = false;
    }
  }

  /// Drives the lock ring with a momentum model: every 100 ms the tick count
  /// grows while the target is still detected and shrinks while it is not.
  /// Reaching [_lockTicksTarget] triggers capture; draining back to zero
  /// cancels the lock and returns to searching.
  void _startLockTimer() {
    _lockTimer?.cancel();
    var momentumTicks = 0;
    _lockTimer = Timer.periodic(const Duration(milliseconds: 100), (timer) {
      if (!mounted) {
        timer.cancel();
        return;
      }

      momentumTicks += _currentHits >= _requiredHits ? 1 : -1;
      if (momentumTicks < 0) momentumTicks = 0;

      setState(() {
        // toDouble(): num.clamp returns num, which cannot be assigned to a
        // double field under sound null safety.
        _lockProgress =
            (momentumTicks / _lockTicksTarget).clamp(0.0, 1.0).toDouble();
      });

      if (momentumTicks >= _lockTicksTarget) {
        timer.cancel();
        if (_state == DetectionState.locking) {
          _triggerCapture();
        }
      } else if (momentumTicks <= 0 && _state == DetectionState.locking) {
        // Complete momentum loss -> cancel the lock.
        timer.cancel();
        setState(() {
          _state = DetectionState.searching;
          _lockProgress = 0.0;
        });
      }
    });
  }

  /// Shows a brief white flash overlay and kicks off the high-res capture.
  Future<void> _triggerCapture() async {
    setState(() {
      _state = DetectionState.capturing;
      _lockProgress = 1.0;
      _showFlash = true;
    });
    // Quick 200 ms white flash without blocking the capture.
    Timer(const Duration(milliseconds: 200), () {
      if (mounted) setState(() => _showFlash = false);
    });
    await _captureAndAnalyze();
  }

  /// Stops the preview stream, takes a full-resolution photo, runs the final
  /// inference on it, archives the best result, and shows the result sheet.
  /// Every exit path ends in cooldown so the stream resumes.
  Future<void> _captureAndAnalyze() async {
    final controller = _controller;
    if (controller == null || !controller.value.isInitialized) return;

    // 1. Stop the stream to avoid a camera resource conflict with takePicture.
    await controller.stopImageStream();
    if (!mounted) return;

    try {
      // 2. Take the high-res picture.
      final XFile photo = await controller.takePicture();

      // 3. Run the final inference on the high-res image.
      final detections = await _tfliteService.runInference(photo.path);

      if (detections.isNotEmpty) {
        // 4. Archive: copy the photo into app documents so it outlives the
        // camera plugin's temporary directory.
        final appDocDir = await getApplicationDocumentsDirectory();
        final fileName = p.basename(photo.path);
        final persistentPath = p.join(
          appDocDir.path,
          'palm_live_${DateTime.now().millisecondsSinceEpoch}_$fileName',
        );
        await File(photo.path).copy(persistentPath);

        final best = detections.first;
        final record = PalmRecord(
          imagePath: persistentPath,
          ripenessClass: best.className,
          confidence: best.confidence,
          timestamp: DateTime.now(),
          x1: best.normalizedBox.left,
          y1: best.normalizedBox.top,
          x2: best.normalizedBox.right,
          y2: best.normalizedBox.bottom,
          detections: detections
              .map((d) => {
                    'className': d.className,
                    'classIndex': d.classIndex,
                    'confidence': d.confidence,
                    'x1': d.normalizedBox.left,
                    'y1': d.normalizedBox.top,
                    'x2': d.normalizedBox.right,
                    'y2': d.normalizedBox.bottom,
                  })
              .toList(),
        );
        await _dbHelper.insertRecord(record);

        // 5. Show the result, then resume scanning via cooldown.
        if (mounted) {
          await _showResultSheet(record);
          _startCooldown();
        }
      } else {
        if (mounted) {
          ScaffoldMessenger.of(context).showSnackBar(
            const SnackBar(
              content: Text("No palm bunches detected in final snap."),
            ),
          );
          _startCooldown();
        }
      }
    } catch (e) {
      debugPrint("Capture error: $e");
      if (mounted) _startCooldown();
    }
  }

  /// Enters a 3-second cooldown: clears overlays and the detection window so
  /// stale hits cannot immediately re-trigger a lock, then restarts the
  /// preview image stream.
  void _startCooldown() {
    if (!mounted) return;
    setState(() {
      _state = DetectionState.cooldown;
      _detections = null; // Clear bounding boxes.
    });
    // Forget old hits so the next lock starts from a clean window.
    _detectionHistory.fillRange(0, _detectionHistory.length, false);
    _currentHits = 0;

    _cooldownTimer?.cancel();
    _cooldownTimer = Timer(const Duration(seconds: 3), () {
      if (!mounted) return;
      setState(() => _state = DetectionState.searching);
      final controller = _controller;
      // Guard: startImageStream throws if the controller is gone or the
      // stream is somehow already running.
      if (controller != null &&
          controller.value.isInitialized &&
          !controller.value.isStreamingImages) {
        controller.startImageStream(_handleImageStream);
      }
    });
  }

  /// Presents a non-dismissible bottom sheet summarizing [record], with a
  /// red health alert for the Empty_Bunch/Abnormal classes. Completes when
  /// the user taps "Done".
  Future<void> _showResultSheet(PalmRecord record) async {
    // Orange by default; red for problem classes, green for ripe classes.
    Color statusColor = const Color(0xFFFF9800);
    if (record.ripenessClass == 'Empty_Bunch' ||
        record.ripenessClass == 'Abnormal') {
      statusColor = const Color(0xFFF44336);
    } else if (record.ripenessClass == 'Ripe' ||
        record.ripenessClass == 'Overripe') {
      statusColor = const Color(0xFF4CAF50);
    }

    await showModalBottomSheet(
      context: context,
      isScrollControlled: true,
      isDismissible: false,
      enableDrag: false,
      shape: const RoundedRectangleBorder(
        borderRadius: BorderRadius.vertical(top: Radius.circular(20)),
      ),
      builder: (context) => Container(
        padding: const EdgeInsets.all(24),
        child: Column(
          mainAxisSize: MainAxisSize.min,
          children: [
            Icon(Icons.check_circle, color: statusColor, size: 64),
            const SizedBox(height: 16),
            Text(
              record.ripenessClass,
              style:
                  const TextStyle(fontSize: 28, fontWeight: FontWeight.bold),
            ),
            Text(
              "Confidence: ${(record.confidence * 100).toStringAsFixed(1)}%",
              style: const TextStyle(color: Colors.grey),
            ),
            const SizedBox(height: 24),
            if (record.ripenessClass == 'Empty_Bunch' ||
                record.ripenessClass == 'Abnormal')
              Container(
                padding: const EdgeInsets.all(12),
                decoration: BoxDecoration(
                  color: Colors.red.shade50,
                  borderRadius: BorderRadius.circular(8),
                ),
                child: const Text(
                  "HEALTH ALERT: Abnormal detected!",
                  style: TextStyle(
                    color: Colors.red,
                    fontWeight: FontWeight.bold,
                  ),
                ),
              ),
            const SizedBox(height: 24),
            SizedBox(
              width: double.infinity,
              child: ElevatedButton(
                onPressed: () => Navigator.pop(context),
                child: const Text("Done"),
              ),
            ),
          ],
        ),
      ),
    );
  }

  @override
  Widget build(BuildContext context) {
    if (!_isInitialized || _controller == null) {
      return const Scaffold(body: Center(child: CircularProgressIndicator()));
    }

    final isLockedVisual =
        _state == DetectionState.locking || _state == DetectionState.capturing;

    return Scaffold(
      backgroundColor: Colors.black,
      body: Stack(
        children: [
          // Camera preview.
          Center(child: CameraPreview(_controller!)),

          // Bounding-box overlays (hidden during capture/cooldown).
          if (_detections != null &&
              _state != DetectionState.capturing &&
              _state != DetectionState.cooldown)
            Positioned.fill(
              child: LayoutBuilder(
                builder: (context, constraints) {
                  return Stack(
                    children: _detections!
                        .map((d) => _buildOverlayBox(d, constraints))
                        .toList(),
                  );
                },
              ),
            ),

          // Top info bar: current state + close button.
          Positioned(
            top: 40,
            left: 20,
            right: 20,
            child: Container(
              padding:
                  const EdgeInsets.symmetric(horizontal: 16, vertical: 8),
              decoration: BoxDecoration(
                color: Colors.black54,
                borderRadius: BorderRadius.circular(20),
              ),
              child: Row(
                children: [
                  Icon(
                    _state == DetectionState.cooldown
                        ? Icons.pause_circle_filled
                        : isLockedVisual
                            ? Icons.lock
                            : Icons.center_focus_weak,
                    color: _state == DetectionState.cooldown
                        ? Colors.blue
                        : isLockedVisual
                            ? Colors.green
                            : Colors.yellow,
                  ),
                  const SizedBox(width: 8),
                  Text(
                    _state == DetectionState.cooldown
                        ? "COOLDOWN"
                        : isLockedVisual
                            ? "LOCKING"
                            : "SEARCHING...",
                    style: TextStyle(
                      color: _state == DetectionState.cooldown
                          ? Colors.blue
                          : isLockedVisual
                              ? Colors.green
                              : Colors.yellow,
                      fontWeight: FontWeight.bold,
                    ),
                  ),
                  const Spacer(),
                  IconButton(
                    icon: const Icon(Icons.close, color: Colors.white),
                    onPressed: () => Navigator.pop(context),
                  ),
                ],
              ),
            ),
          ),

          // Circular progress ring while locking.
          if (_state == DetectionState.locking)
            Positioned.fill(
              child: Center(
                child: SizedBox(
                  width: 120,
                  height: 120,
                  child: TweenAnimationBuilder<double>(
                    tween: Tween(begin: 0.0, end: _lockProgress),
                    duration: const Duration(milliseconds: 100),
                    builder: (context, value, _) =>
                        CircularProgressIndicator(
                      value: value,
                      strokeWidth: 8,
                      color: Colors.greenAccent,
                      backgroundColor: Colors.white24,
                    ),
                  ),
                ),
              ),
            ),

          // White capture flash.
          Positioned.fill(
            child: IgnorePointer(
              child: AnimatedOpacity(
                opacity: _showFlash ? 1.0 : 0.0,
                duration: const Duration(milliseconds: 200),
                child: Container(color: Colors.white),
              ),
            ),
          ),

          // Dim + spinner while the high-res capture is processed.
          if (_state == DetectionState.capturing && !_showFlash)
            Positioned.fill(
              child: Container(
                color: Colors.black45,
                child: const Center(
                  child: CircularProgressIndicator(color: Colors.white),
                ),
              ),
            ),

          // Cooldown notice.
          if (_state == DetectionState.cooldown)
            Positioned.fill(
              child: Container(
                color: Colors.black45,
                child: const Center(
                  child: Text(
                    "Resuming scan...",
                    style: TextStyle(
                      color: Colors.white,
                      fontSize: 18,
                      fontWeight: FontWeight.bold,
                    ),
                  ),
                ),
              ),
            ),

          // Bottom hint while searching.
          if (_state == DetectionState.searching)
            const Positioned(
              bottom: 40,
              left: 0,
              right: 0,
              child: Center(
                child: Text(
                  "Hold steady to lock target",
                  style: TextStyle(
                    color: Colors.white,
                    fontWeight: FontWeight.w500,
                  ),
                ),
              ),
            ),
        ],
      ),
    );
  }

  /// Draws one detection box, scaled from normalized [0,1] coordinates to
  /// the preview's constraints, with a confidence label in the top-left.
  Widget _buildOverlayBox(
      DetectionResult detection, BoxConstraints constraints) {
    final rect = detection.normalizedBox;
    // Green only while the system is overall locked AND this detection is
    // above the lock threshold; yellow otherwise.
    final color = ((_state == DetectionState.locking ||
                _state == DetectionState.capturing) &&
            detection.confidence > _lockThreshold)
        ? Colors.green
        : Colors.yellow;

    return Positioned(
      left: rect.left * constraints.maxWidth,
      top: rect.top * constraints.maxHeight,
      width: rect.width * constraints.maxWidth,
      height: rect.height * constraints.maxHeight,
      child: Container(
        decoration: BoxDecoration(
          border: Border.all(color: color, width: 2),
          borderRadius: BorderRadius.circular(4),
        ),
        child: Align(
          alignment: Alignment.topLeft,
          child: Container(
            padding: const EdgeInsets.symmetric(horizontal: 4, vertical: 2),
            color: color,
            child: Text(
              "${(detection.confidence * 100).toStringAsFixed(0)}%",
              style: const TextStyle(
                color: Colors.white,
                fontSize: 10,
                fontWeight: FontWeight.bold,
              ),
            ),
          ),
        ),
      ),
    );
  }

  @override
  void dispose() {
    _lockTimer?.cancel();
    _cooldownTimer?.cancel();
    _controller?.dispose(); // Also tears down any active image stream.
    _tfliteService.dispose();
    super.dispose();
  }
}