===== Folder Structure ===== Folder PATH listing for volume New Volume Volume serial number is 36B1-447D E:\TASK\RESEARCH AND DEVELOPMENT\PALM-OIL-AI\MOBILE\SRC | App.tsx | +---components | DetectionOverlay.tsx | TallyDashboard.tsx | +---hooks +---navigation | AppNavigator.tsx | +---screens | DashboardScreen.tsx | GalleryAnalysisScreen.tsx | HistoryScreen.tsx | ScannerScreen.tsx | +---theme | index.ts | \---utils storage.ts yoloParser.ts ================================================== FILE: E:\Task\Research and Development\palm-oil-ai\mobile\src\App.tsx ================================================== import React from 'react'; import { NavigationContainer } from '@react-navigation/native'; import { AppNavigator } from './navigation/AppNavigator'; export default function App() { return ( ); } ================================================== FILE: E:\Task\Research and Development\palm-oil-ai\mobile\src\components\DetectionOverlay.tsx ================================================== import React from 'react'; import { View, StyleSheet, Text } from 'react-native'; import Animated, { useAnimatedStyle } from 'react-native-reanimated'; import { Colors } from '../theme'; import { BoundingBox } from '../utils/yoloParser'; interface DetectionOverlayProps { detections: BoundingBox[]; containerWidth?: number; containerHeight?: number; } export const DetectionOverlay: React.FC = ({ detections, containerWidth, containerHeight }) => { return ( {detections.map((det) => { const x = containerWidth ? det.relX * containerWidth : det.x; const y = containerHeight ? det.relY * containerHeight : det.y; const width = containerWidth ? det.relWidth * containerWidth : det.width; const height = containerHeight ? 
det.relHeight * containerHeight : det.height; return ( {det.label} ({Math.round(det.confidence * 100)}%) ); })} ); }; const styles = StyleSheet.create({ box: { position: 'absolute', borderWidth: 2, borderRadius: 4, }, labelContainer: { position: 'absolute', top: -24, left: -2, paddingHorizontal: 6, paddingVertical: 2, borderRadius: 4, }, labelText: { color: '#FFF', fontSize: 12, fontWeight: 'bold', } }); ================================================== FILE: E:\Task\Research and Development\palm-oil-ai\mobile\src\components\TallyDashboard.tsx ================================================== import React from 'react'; import { View, StyleSheet, Text } from 'react-native'; import { Colors, Typography } from '../theme'; interface TallyCounts { [key: string]: number; } interface TallyDashboardProps { counts: TallyCounts; } export const TallyDashboard: React.FC = ({ counts }) => { const classNames = [ 'Empty_Bunch', 'Underripe', 'Abnormal', 'Ripe', 'Unripe', 'Overripe' ]; return ( {classNames.map((name, index) => ( {counts[name] || 0} {name} ))} ); }; const styles = StyleSheet.create({ container: { flexDirection: 'row', flexWrap: 'wrap', backgroundColor: 'rgba(15, 23, 42, 0.8)', padding: 12, borderRadius: 12, margin: 16, position: 'absolute', bottom: 40, left: 0, right: 0, justifyContent: 'space-around', borderWidth: 1, borderColor: 'rgba(255, 255, 255, 0.1)', }, item: { alignItems: 'center', minWidth: '30%', marginVertical: 4, }, count: { fontSize: 18, fontWeight: 'bold', }, label: { fontSize: 10, color: Colors.textSecondary, marginTop: 2, textTransform: 'uppercase', } }); ================================================== FILE: E:\Task\Research and Development\palm-oil-ai\mobile\src\navigation\AppNavigator.tsx ================================================== import React from 'react'; import { createNativeStackNavigator } from '@react-navigation/native-stack'; import { DashboardScreen } from '../screens/DashboardScreen'; import { ScannerScreen } from 
'../screens/ScannerScreen'; import { HistoryScreen } from '../screens/HistoryScreen'; import { GalleryAnalysisScreen } from '../screens/GalleryAnalysisScreen'; import { Colors } from '../theme'; const Stack = createNativeStackNavigator(); export const AppNavigator = () => { return ( ); }; ================================================== FILE: E:\Task\Research and Development\palm-oil-ai\mobile\src\screens\DashboardScreen.tsx ================================================== import React from 'react'; import { StyleSheet, View, Text, TouchableOpacity, SafeAreaView, StatusBar, ScrollView } from 'react-native'; import { Scan, Image as ImageIcon, History, ShieldAlert } from 'lucide-react-native'; import { Colors } from '../theme'; export const DashboardScreen = ({ navigation }: any) => { return ( Palm Oil AI Industrial Management Hub navigation.navigate('Scanner')} > Live Field Scan Real-time ripeness detection & health alerts navigation.navigate('GalleryAnalysis')} > Analyze Gallery Upload & analyze harvested bunches from storage navigation.navigate('History')} > Detection History Review past logs and industrial field journal System Health AI Inference: ACTIVE | Model: V11-INT8 Industrial Suite v4.2.0-stable ); }; const styles = StyleSheet.create({ container: { flex: 1, backgroundColor: Colors.background, }, scrollContent: { paddingBottom: 32, }, header: { padding: 32, paddingTop: 48, }, title: { color: '#FFF', fontSize: 32, fontWeight: 'bold', }, subtitle: { color: Colors.textSecondary, fontSize: 16, marginTop: 4, }, grid: { flex: 1, padding: 24, gap: 16, }, card: { backgroundColor: Colors.surface, padding: 20, borderRadius: 20, borderWidth: 1, borderColor: 'rgba(255,255,255,0.05)', }, alertCard: { borderColor: 'rgba(255, 59, 48, 0.2)', }, iconContainer: { width: 64, height: 64, borderRadius: 16, justifyContent: 'center', alignItems: 'center', marginBottom: 16, }, cardTitle: { color: '#FFF', fontSize: 18, fontWeight: 'bold', }, cardDesc: { color: 
Colors.textSecondary, fontSize: 14, marginTop: 4, }, footer: { padding: 24, alignItems: 'center', }, versionText: { color: 'rgba(255,255,255,0.3)', fontSize: 12, fontWeight: '500', } }); ================================================== FILE: E:\Task\Research and Development\palm-oil-ai\mobile\src\screens\GalleryAnalysisScreen.tsx ================================================== import React, { useState, useEffect } from 'react'; import { StyleSheet, View, Text, Image, TouchableOpacity, SafeAreaView, ActivityIndicator, Alert, Dimensions } from 'react-native'; import { useNavigation, useRoute } from '@react-navigation/native'; import { launchImageLibrary } from 'react-native-image-picker'; import { useTensorflowModel } from 'react-native-fast-tflite'; import { ArrowLeft, Upload, CheckCircle2, History as HistoryIcon } from 'lucide-react-native'; import { NativeModules } from 'react-native'; const { PixelModule } = NativeModules; import { Colors } from '../theme'; import { parseYoloResults, calculateTally, BoundingBox } from '../utils/yoloParser'; import { saveDetectionRecord } from '../utils/storage'; import { DetectionOverlay } from '../components/DetectionOverlay'; const { width: SCREEN_WIDTH } = Dimensions.get('window'); const base64ToUint8Array = (base64: string) => { if (!base64 || typeof base64 !== 'string') return new Uint8Array(0); const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'; const lookup = new Uint8Array(256); for (let i = 0; i < chars.length; i++) { lookup[chars.charCodeAt(i)] = i; } const len = base64.length; // Calculate buffer length (approximate is fine for Uint8Array assignment) const buffer = new Uint8Array(Math.floor((len * 3) / 4)); let p = 0; for (let i = 0; i < len; i += 4) { const encoded1 = lookup[base64.charCodeAt(i)]; const encoded2 = lookup[base64.charCodeAt(i + 1)]; const encoded3 = lookup[base64.charCodeAt(i + 2)] || 0; const encoded4 = lookup[base64.charCodeAt(i + 3)] || 0; buffer[p++] = (encoded1 << 
2) | (encoded2 >> 4); if (p < buffer.length) buffer[p++] = ((encoded2 & 15) << 4) | (encoded3 >> 2); if (p < buffer.length) buffer[p++] = ((encoded3 & 3) << 6) | (encoded4 & 63); } return buffer; }; // Removed manual base64ToUint8Array as we now use imageToRgb export const GalleryAnalysisScreen = () => { const navigation = useNavigation(); const route = useRoute(); const [imageUri, setImageUri] = useState(null); const [fileName, setFileName] = useState(null); const [isAnalyzing, setIsAnalyzing] = useState(false); const [detections, setDetections] = useState([]); const [counts, setCounts] = useState>({}); const [containerSize, setContainerSize] = useState({ width: 0, height: 0 }); const model = useTensorflowModel(require('../../assets/best.tflite')); useEffect(() => { if (route.params?.imageUri) { setImageUri(route.params.imageUri); setDetections([]); setCounts({}); } else { handlePickImage(); } }, [route.params]); const handlePickImage = async () => { try { const result = await launchImageLibrary({ mediaType: 'photo', includeBase64: true, quality: 1, }); if (result.assets && result.assets[0]) { setImageUri(result.assets[0].uri || null); setFileName(result.assets[0].fileName || null); setDetections([]); setCounts({}); } else { navigation.goBack(); } } catch (error) { console.error('Pick Image Error:', error); Alert.alert('Error', 'Failed to pick image'); navigation.goBack(); } }; const analyzeImage = async (uri: string | null) => { if (!uri || model.state !== 'loaded') return; setIsAnalyzing(true); try { // 1. & 2. 
CRITICAL FIX: Use internal native bridge to get 640x640 RGB pixels const base64Data = await PixelModule.getPixelsFromUri(uri); const uint8Array = base64ToUint8Array(base64Data); // Convert to Int8Array for the quantized model const inputTensor = new Int8Array(uint8Array.buffer); if (inputTensor.length !== 640 * 640 * 3) { console.warn(`Buffer size mismatch: ${inputTensor.length} vs 1228800.`); } const resultsRaw = model.model.runSync([inputTensor]); const results = parseYoloResults(resultsRaw[0], 640, 640); if (results.length === 0) { Alert.alert('No Detections', 'No palm oil bunches were detected in this image.'); setDetections([]); setCounts({}); return; } setDetections(results); const tally = calculateTally(results); setCounts(tally); // Save to history saveDetectionRecord({ label: results[0].label, confidence: results[0].confidence, classId: results[0].classId, imageUri: uri, fileName: fileName || undefined, detections: results, counts: tally, }); } catch (error) { console.error('Inference Error:', error); Alert.alert('Analysis Error', 'Failed to analyze the image'); } finally { setIsAnalyzing(false); } }; return ( navigation.goBack()} style={styles.backButton}> Gallery Analysis {fileName && {fileName}} {imageUri ? ( { const { width, height } = event.nativeEvent.layout; setContainerSize({ width, height }); }} > {!isAnalyzing && } {isAnalyzing && ( AI ANALYZING... )} ) : ( )} {!isAnalyzing && detections.length > 0 && ( Analysis Complete {Object.entries(counts).map(([label, count]) => ( {label}: {count} ))} navigation.navigate('History')} > View in Field Journal )} {imageUri && !isAnalyzing && detections.length === 0 && ( analyzeImage(imageUri)} > Start Analysis )} {(detections.length > 0 || !imageUri) && ( {imageUri ? 
'Pick Another Image' : 'Select Image'} )} ); }; const styles = StyleSheet.create({ container: { flex: 1, backgroundColor: Colors.background, }, header: { flexDirection: 'row', alignItems: 'center', justifyContent: 'space-between', padding: 16, }, backButton: { padding: 8, backgroundColor: 'rgba(255,255,255,0.05)', borderRadius: 12, }, title: { color: '#FFF', fontSize: 20, fontWeight: 'bold', }, fileNameText: { color: Colors.textSecondary, fontSize: 12, fontWeight: '500', }, content: { flex: 1, padding: 16, }, imageContainer: { width: '100%', aspectRatio: 1, backgroundColor: '#000', borderRadius: 20, overflow: 'hidden', position: 'relative', borderWidth: 1, borderColor: 'rgba(255,255,255,0.1)', }, image: { width: '100%', height: '100%', }, loadingOverlay: { ...StyleSheet.absoluteFillObject, backgroundColor: 'rgba(15, 23, 42, 0.8)', justifyContent: 'center', alignItems: 'center', gap: 16, }, loadingText: { color: '#FFF', fontSize: 14, fontWeight: '800', letterSpacing: 2, }, emptyContainer: { flex: 1, justifyContent: 'center', alignItems: 'center', }, resultCard: { marginTop: 24, backgroundColor: Colors.surface, padding: 24, borderRadius: 24, borderWidth: 1, borderColor: 'rgba(255,255,255,0.05)', }, resultHeader: { flexDirection: 'row', alignItems: 'center', gap: 12, marginBottom: 20, }, resultTitle: { color: '#FFF', fontSize: 18, fontWeight: 'bold', }, statsContainer: { gap: 12, marginBottom: 24, }, statRow: { flexDirection: 'row', justifyContent: 'space-between', paddingVertical: 8, borderBottomWidth: 1, borderBottomColor: 'rgba(255,255,255,0.05)', }, statLabel: { color: Colors.textSecondary, fontSize: 16, }, statValue: { color: '#FFF', fontSize: 16, fontWeight: 'bold', }, historyButton: { flexDirection: 'row', alignItems: 'center', justifyContent: 'center', backgroundColor: 'rgba(255,255,255,0.05)', padding: 16, borderRadius: 16, gap: 10, }, historyButtonText: { color: '#FFF', fontSize: 14, fontWeight: '600', }, reUploadButton: { flexDirection: 'row', alignItems: 
'center', justifyContent: 'center', backgroundColor: Colors.info, margin: 24, padding: 18, borderRadius: 18, gap: 12, }, reUploadText: { color: '#FFF', fontSize: 16, fontWeight: 'bold', }, analyzeButton: { flexDirection: 'row', alignItems: 'center', justifyContent: 'center', backgroundColor: Colors.success, margin: 24, padding: 18, borderRadius: 18, gap: 12, }, analyzeButtonText: { color: '#FFF', fontSize: 16, fontWeight: 'bold', } }); ================================================== FILE: E:\Task\Research and Development\palm-oil-ai\mobile\src\screens\HistoryScreen.tsx ================================================== import React, { useState, useCallback } from 'react'; import { StyleSheet, View, Text, FlatList, TouchableOpacity, RefreshControl, Image, Alert } from 'react-native'; import { useFocusEffect } from '@react-navigation/native'; import { Trash2, Clock, CheckCircle, AlertTriangle, Square, CheckSquare, X, Trash } from 'lucide-react-native'; import { getHistory, clearHistory, deleteRecords, DetectionRecord } from '../utils/storage'; import { Colors } from '../theme'; import { DetectionOverlay } from '../components/DetectionOverlay'; const HistoryCard = ({ item, expandedId, setExpandedId, toggleSelect, isSelectMode, selectedIds, handleLongPress }: any) => { const [imgSize, setImgSize] = useState({ w: 0, h: 0 }); const isExpanded = expandedId === item.id; const isSelected = selectedIds.includes(item.id); const toggleExpand = (id: string) => { if (isSelectMode) { toggleSelect(id); } else { setExpandedId(expandedId === id ? null : id); } }; const date = new Date(item.timestamp); const timeStr = date.toLocaleTimeString([], { hour: '2-digit', minute: '2-digit' }); const dateStr = date.toLocaleDateString(); return ( toggleExpand(item.id)} onLongPress={() => handleLongPress(item.id)} style={[ styles.card, item.isHealthAlert && styles.alertCard, isSelected && styles.selectedCard ]} > {isSelectMode ? ( isSelected ? ( ) : ( ) ) : item.isHealthAlert ? 
( ) : ( )} {item.label} {(item.confidence * 100).toFixed(1)}% Conf. {isExpanded && item.imageUri && ( setImgSize({ w: e.nativeEvent.layout.width, h: e.nativeEvent.layout.height })} > {imgSize.w > 0 && ( )} )} {Object.entries(item.counts).map(([label, count]: [string, any]) => ( {label}: {count} ))} {dateStr} at {timeStr} {item.fileName && ( {item.fileName} )} ); }; export const HistoryScreen = () => { const [history, setHistory] = useState([]); const [refreshing, setRefreshing] = useState(false); const [expandedId, setExpandedId] = useState(null); const [isSelectMode, setIsSelectMode] = useState(false); const [selectedIds, setSelectedIds] = useState([]); const fetchHistory = async () => { const data = await getHistory(); setHistory(data); }; useFocusEffect( useCallback(() => { fetchHistory(); }, []) ); const onRefresh = async () => { setRefreshing(true); await fetchHistory(); setRefreshing(false); }; const handleClearAll = () => { Alert.alert( "Delete All Logs", "This action will permanently wipe your entire industrial field journal. Are you sure?", [ { text: "Cancel", style: "cancel" }, { text: "Delete All", style: "destructive", onPress: async () => { await clearHistory(); setHistory([]); setIsSelectMode(false); setSelectedIds([]); } } ] ); }; const handleDeleteSelected = () => { Alert.alert( "Delete Selected", `Are you sure you want to delete ${selectedIds.length} records?`, [ { text: "Cancel", style: "cancel" }, { text: "Delete", style: "destructive", onPress: async () => { await deleteRecords(selectedIds); setSelectedIds([]); setIsSelectMode(false); fetchHistory(); } } ] ); }; const toggleSelect = (id: string) => { if (selectedIds.includes(id)) { setSelectedIds(selectedIds.filter((idx: string) => idx !== id)); } else { setSelectedIds([...selectedIds, id]); } }; const toggleExpand = (id: string) => { if (isSelectMode) { toggleSelect(id); } else { setExpandedId(expandedId === id ? 
null : id); } }; const handleLongPress = (id: string) => { if (!isSelectMode) { setIsSelectMode(true); setSelectedIds([id]); } }; const exitSelectionMode = () => { setIsSelectMode(false); setSelectedIds([]); }; const renderItem = ({ item }: { item: DetectionRecord }) => ( ); return ( Field Journal {isSelectMode && ( {selectedIds.length} Selected )} {history.length > 0 && ( isSelectMode ? ( ) : ( <> Delete All setIsSelectMode(true)} style={styles.iconButton}> ) )} {history.length === 0 ? ( No detections recorded yet. Perform detections in the Scanner tab to see them here. ) : ( item.id} renderItem={renderItem} contentContainerStyle={styles.listContent} refreshControl={ } /> {isSelectMode && selectedIds.length > 0 && ( Delete Selected ({selectedIds.length}) Delete All )} )} ); }; const styles = StyleSheet.create({ container: { flex: 1, backgroundColor: Colors.background, }, header: { padding: 24, paddingBottom: 16, flexDirection: 'row', justifyContent: 'space-between', alignItems: 'center', }, title: { color: '#FFF', fontSize: 28, fontWeight: 'bold', }, selectionCount: { color: Colors.info, fontSize: 14, fontWeight: '500', marginTop: 2, }, headerActions: { flexDirection: 'row', gap: 8, }, iconButton: { padding: 8, backgroundColor: 'rgba(255,255,255,0.05)', borderRadius: 12, }, clearButton: { padding: 8, }, listContent: { padding: 16, paddingTop: 0, }, card: { backgroundColor: Colors.surface, borderRadius: 16, padding: 16, marginBottom: 16, borderWidth: 1, borderColor: 'rgba(255,255,255,0.05)', }, selectedCard: { borderColor: Colors.info, borderWidth: 2, backgroundColor: 'rgba(0, 122, 255, 0.05)', }, alertCard: { borderColor: 'rgba(255, 59, 48, 0.3)', borderLeftWidth: 4, borderLeftColor: Colors.error, }, expandedContent: { marginVertical: 12, borderRadius: 12, overflow: 'hidden', backgroundColor: '#000', }, imageWrapper: { width: '100%', aspectRatio: 1, position: 'relative', }, detailImage: { width: '100%', height: '100%', }, cardHeader: { flexDirection: 'row', 
justifyContent: 'space-between', alignItems: 'center', marginBottom: 12, }, labelContainer: { flexDirection: 'row', alignItems: 'center', gap: 8, }, label: { fontSize: 18, fontWeight: 'bold', }, confidence: { color: Colors.textSecondary, fontSize: 14, }, cardBody: { paddingVertical: 12, borderTopWidth: 1, borderBottomWidth: 1, borderColor: 'rgba(255,255,255,0.05)', }, tallyContainer: { flexDirection: 'row', flexWrap: 'wrap', gap: 12, }, tallyItem: { flexDirection: 'row', gap: 4, }, tallyLabel: { color: Colors.textSecondary, fontSize: 12, }, tallyCount: { color: '#FFF', fontSize: 12, fontWeight: 'bold', }, cardFooter: { flexDirection: 'row', alignItems: 'center', gap: 6, marginTop: 12, }, footerText: { color: Colors.textSecondary, fontSize: 12, }, emptyState: { flex: 1, justifyContent: 'center', alignItems: 'center', padding: 32, }, emptyText: { color: '#FFF', fontSize: 18, fontWeight: 'bold', marginTop: 16, }, emptySubtext: { color: Colors.textSecondary, textAlign: 'center', marginTop: 8, }, bottomActions: { position: 'absolute', bottom: 24, left: 24, right: 24, backgroundColor: Colors.error, borderRadius: 16, elevation: 8, shadowColor: '#000', shadowOffset: { width: 0, height: 4 }, shadowOpacity: 0.3, shadowRadius: 8, flexDirection: 'row', overflow: 'hidden', }, deleteSelectionButton: { flexDirection: 'row', alignItems: 'center', justifyContent: 'center', padding: 16, gap: 12, flex: 1.5, }, deleteButtonText: { color: '#FFF', fontSize: 14, fontWeight: 'bold', }, clearHeaderButton: { flexDirection: 'row', alignItems: 'center', gap: 6, paddingVertical: 8, paddingHorizontal: 12, backgroundColor: 'rgba(255, 59, 48, 0.1)', borderRadius: 12, }, clearHeaderText: { color: Colors.error, fontSize: 12, fontWeight: 'bold', }, clearAllButton: { flexDirection: 'row', alignItems: 'center', justifyContent: 'center', padding: 16, gap: 12, borderLeftWidth: 1, borderLeftColor: 'rgba(255,255,255,0.2)', flex: 1, }, }); ================================================== FILE: 
E:\Task\Research and Development\palm-oil-ai\mobile\src\screens\ScannerScreen.tsx ================================================== import React, { useState, useEffect } from 'react'; import { StyleSheet, View, Text, StatusBar, SafeAreaView, TouchableOpacity, Image } from 'react-native'; import { useIsFocused } from '@react-navigation/native'; import { Camera, useCameraDevice, useCameraPermission, useFrameProcessor, useCameraFormat } from 'react-native-vision-camera'; import { useTensorflowModel } from 'react-native-fast-tflite'; import { runOnJS } from 'react-native-reanimated'; import { launchImageLibrary } from 'react-native-image-picker'; import { parseYoloResults, calculateTally, BoundingBox } from '../utils/yoloParser'; import { saveDetectionRecord } from '../utils/storage'; import { DetectionOverlay } from '../components/DetectionOverlay'; import { TallyDashboard } from '../components/TallyDashboard'; import { Colors } from '../theme'; import { Image as ImageIcon, Upload } from 'lucide-react-native'; export const ScannerScreen = ({ route }: any) => { const isFocused = useIsFocused(); const { hasPermission, requestPermission } = useCameraPermission(); const device = useCameraDevice('back'); const [detections, setDetections] = useState([]); const [counts, setCounts] = useState>({}); const [cameraInitialized, setCameraInitialized] = useState(false); const [lastSavedTime, setLastSavedTime] = useState(0); // Load the model const model = useTensorflowModel(require('../../assets/best.tflite')); // Find a format that matches 640x640 or closest small resolution const format = useCameraFormat(device, [ { videoResolution: { width: 640, height: 480 } }, { fps: 30 } ]); useEffect(() => { if (!hasPermission) { requestPermission(); } }, [hasPermission]); const frameProcessor = useFrameProcessor((frame) => { 'worklet'; if (model.state === 'loaded') { try { // FALLBACK: Without the resize plugin, we pass the raw buffer. 
// Fast-TFLite might handle resizing if we are lucky with the input. // In the next step, we will select a 640x480 format to get closer to 640x640. const buffer = frame.toArrayBuffer(); const result = model.model.runSync([new Int8Array(buffer)]); const boxes = parseYoloResults(result[0], frame.width, frame.height); runOnJS(setDetections)(boxes); const currentCounts = calculateTally(boxes); runOnJS(setCounts)(currentCounts); if (boxes.length > 0) { runOnJS(handleAutoSave)(boxes, currentCounts); } } catch (e) { console.error('AI Inference Detail:', e); } } }, [model]); const handleAutoSave = (boxes: BoundingBox[], currentCounts: Record) => { const now = Date.now(); if (now - lastSavedTime > 5000) { const topDet = boxes.reduce((prev, current) => (prev.confidence > current.confidence) ? prev : current); saveDetectionRecord({ label: topDet.label, confidence: topDet.confidence, classId: topDet.classId, detections: boxes, counts: currentCounts }); setLastSavedTime(now); } }; if (!hasPermission) return ( ERROR: No Camera Permission ); if (!device) return ( ERROR: No Camera Device Found ); return ( {isFocused && ( { console.log('Camera: Initialized'); setCameraInitialized(true); }} onError={(error) => console.error('Camera: Error', error)} /> )} Live Scanner {model.state === 'loaded' ? '● AI ACTIVE' : `○ ${model.state.toUpperCase()}`} Cam: {cameraInitialized ? 
'READY' : 'STARTING...'} | Model: {model.state.toUpperCase()} | Dets: {detections.length} ); }; const styles = StyleSheet.create({ container: { flex: 1, backgroundColor: Colors.background, }, overlay: { flex: 1, }, header: { padding: 16, flexDirection: 'row', justifyContent: 'space-between', alignItems: 'center', }, title: { color: '#FFF', fontSize: 16, fontWeight: 'bold', letterSpacing: 0.5, }, status: { color: Colors.success, fontSize: 11, fontWeight: '800', }, text: { color: '#FFF', textAlign: 'center', fontSize: 18, fontWeight: 'bold', }, galleryButton: { position: 'absolute', bottom: 100, right: 20, backgroundColor: 'rgba(30, 41, 59, 0.8)', padding: 16, borderRadius: 30, borderWidth: 1, borderColor: 'rgba(255,255,255,0.2)', }, debugBox: { position: 'absolute', top: 60, left: 20, right: 20, backgroundColor: 'rgba(255,255,255,0.9)', padding: 8, borderRadius: 8, }, debugText: { color: '#000', fontSize: 12, fontWeight: '600', textAlign: 'center', } }); ================================================== FILE: E:\Task\Research and Development\palm-oil-ai\mobile\src\theme\index.ts ================================================== export const Colors = { // Industrial Alert Colors error: '#FF3B30', // High-visibility Red for Abnormal/Empty_Bunch warning: '#FFCC00', // Yellow for Penalty/Underripe success: '#34C759', // Green for Ripe info: '#007AFF', // Blue for Overripe (processing focus) // Base Palette background: '#0F172A', // Deep Slate surface: '#1E293B', text: '#F8FAFC', textSecondary: '#94A3B8', // Class Mapping Colors classes: { 0: '#FF3B30', // Empty_Bunch (Alert) 1: '#FFCC00', // Underripe (Warning) 2: '#FF3B30', // Abnormal (Health Alert) 3: '#34C759', // Ripe (Success) 4: '#FF9500', // Unripe (Penalty) 5: '#AF52DE', // Overripe (FFA Prevention) } }; export const Typography = { header: { fontSize: 24, fontWeight: 'bold', color: Colors.text, }, body: { fontSize: 16, color: Colors.textSecondary, }, label: { fontSize: 12, fontWeight: '600', textTransform: 
'uppercase',
  },
};

// ==================================================
// FILE: src/utils/storage.ts
// ==================================================
import AsyncStorage from '@react-native-async-storage/async-storage';
import { BoundingBox } from './yoloParser';

/** One persisted detection event, as shown in the Field Journal (HistoryScreen). */
export interface DetectionRecord {
  id: string;
  timestamp: string;        // ISO-8601 creation time
  label: string;            // class name of the top detection
  confidence: number;       // 0..1 confidence of the top detection
  classId: number;
  isHealthAlert: boolean;   // true when an Empty_Bunch (0) or Abnormal (2) box is present
  imageUri?: string;
  fileName?: string;
  detections: BoundingBox[];
  counts: Record<string, number>;
}

const STORAGE_KEY = 'palm_history';

/**
 * Saves a new detection record to local storage.
 * `id`, `timestamp` and `isHealthAlert` are derived here, so callers supply
 * everything else. Records are stored newest-first. Errors are logged and
 * swallowed (best-effort persistence).
 */
export const saveDetectionRecord = async (
  record: Omit<DetectionRecord, 'id' | 'timestamp' | 'isHealthAlert'>
): Promise<void> => {
  try {
    const existing = await AsyncStorage.getItem(STORAGE_KEY);
    const history: DetectionRecord[] = existing ? JSON.parse(existing) : [];
    const newRecord: DetectionRecord = {
      ...record,
      id: Date.now().toString(),
      timestamp: new Date().toISOString(),
      // classId 0 = Empty_Bunch, 2 = Abnormal — both count as health alerts.
      isHealthAlert: record.detections.some(d => d.classId === 0 || d.classId === 2)
    };
    await AsyncStorage.setItem(STORAGE_KEY, JSON.stringify([newRecord, ...history]));
    console.log('Storage: Record saved successfully');
  } catch (error) {
    console.error('Storage: Error saving record', error);
  }
};

/**
 * Retrieves all detection records from local storage (newest first).
 * Returns an empty array on read/parse failure.
 */
export const getHistory = async (): Promise<DetectionRecord[]> => {
  try {
    const existing = await AsyncStorage.getItem(STORAGE_KEY);
    return existing ? JSON.parse(existing) : [];
  } catch (error) {
    console.error('Storage: Error fetching history', error);
    return [];
  }
};

/**
 * Clears all detection records from local storage.
 */
export const clearHistory = async (): Promise<void> => {
  try {
    await AsyncStorage.removeItem(STORAGE_KEY);
    console.log('Storage: History cleared');
  } catch (error) {
    console.error('Storage: Error clearing history', error);
  }
};

/**
 * Deletes specific records from local storage.
*/ export const deleteRecords = async (ids: string[]) => { try { const existing = await AsyncStorage.getItem(STORAGE_KEY); if (!existing) return; const history: DetectionRecord[] = JSON.parse(existing); const updated = history.filter(record => !ids.includes(record.id)); await AsyncStorage.setItem(STORAGE_KEY, JSON.stringify(updated)); console.log(`Storage: ${ids.length} records deleted`); } catch (error) { console.error('Storage: Error deleting records', error); } }; ================================================== FILE: E:\Task\Research and Development\palm-oil-ai\mobile\src\utils\yoloParser.ts ================================================== export interface BoundingBox { id: string; x: number; y: number; width: number; height: number; relX: number; relY: number; relWidth: number; relHeight: number; label: string; confidence: number; classId: number; } const CLASS_NAMES = [ 'Empty_Bunch', 'Underripe', 'Abnormal', 'Ripe', 'Unripe', 'Overripe' ]; /** * Parses YOLOv8/v11 output tensor into BoundingBox objects. * Format: [x1, y1, x2, y2, score, classId] * Quantization: scale=0.019916336983442307, zeroPoint=-124 */ /** * Normalizes a raw pixel buffer to 0.0-1.0 range for Float32 models. 
*/ export function normalizeTensor(buffer: ArrayBuffer, width: number, height: number): Float32Array { 'worklet'; const data = new Uint8Array(buffer); const normalized = new Float32Array(width * height * 3); for (let i = 0; i < data.length; i++) { normalized[i] = data[i] / 255.0; } return normalized; } export function parseYoloResults( tensor: Int8Array | Uint8Array | Float32Array | any, frameWidth: number, frameHeight: number ): BoundingBox[] { 'worklet'; // Detection parameters from INT8 model const scale = 0.019916336983442307; const zeroPoint = -124; const numDetections = 300; const numElements = 6; const detections: BoundingBox[] = []; const data = tensor; if (!data || data.length === 0) return []; for (let i = 0; i < numDetections; i++) { const base = i * numElements; if (base + 5 >= data.length) break; // Handle Float32 vs Quantized Int8 const getVal = (idx: number) => { const val = data[idx]; if (data instanceof Float32Array) return val; return (val - zeroPoint) * scale; }; const x1 = getVal(base + 0); const y1 = getVal(base + 1); const x2 = getVal(base + 2); const y2 = getVal(base + 3); const score = getVal(base + 4); const classId = Math.round(getVal(base + 5)); if (score > 0.45 && classId >= 0 && classId < CLASS_NAMES.length) { const normalizedX1 = x1 / 640; const normalizedY1 = y1 / 640; const normalizedX2 = x2 / 640; const normalizedY2 = y2 / 640; detections.push({ id: `det_${i}_${Math.random().toString(36).substr(2, 9)}`, x: Math.max(0, normalizedX1 * frameWidth), y: Math.max(0, normalizedY1 * frameHeight), width: Math.max(0, (normalizedX2 - normalizedX1) * frameWidth), height: Math.max(0, (normalizedY2 - normalizedY1) * frameHeight), relX: normalizedX1, relY: normalizedY1, relWidth: normalizedX2 - normalizedX1, relHeight: normalizedY2 - normalizedY1, label: CLASS_NAMES[classId], confidence: score, classId: classId }); } } return detections; } export function calculateTally(detections: BoundingBox[]) { 'worklet'; const counts: { [key: string]: number 
} = {}; for (const det of detections) { counts[det.label] = (counts[det.label] || 0) + 1; } return counts; }