"""Streamlit front-end for the Palm Oil FFB (fresh fruit bunch) AI backend.

Talks to a FastAPI service at ``API_BASE_URL`` and renders three tabs:
single-image analysis, batch processing, and hybrid (image / text) search.
The whole app is gated on the backend being reachable.
"""

import streamlit as st
import requests
from PIL import Image  # noqa: F401 -- kept for compatibility with the rest of the file
import io  # noqa: F401 -- kept for compatibility with the rest of the file
import base64

# --- 0. Page Config ---
# st.set_page_config must be the FIRST Streamlit command executed; it is
# hoisted above the backend check so the offline-error branch (st.error /
# st.info / st.button below) cannot run before it.
st.set_page_config(page_title="Palm Oil Ripeness AI", layout="wide")

# --- 1. Global Backend Check ---
API_BASE_URL = "http://localhost:8000"


def check_backend() -> bool:
    """Return True iff the backend answers /get_confidence within 2 seconds."""
    try:
        res = requests.get(f"{API_BASE_URL}/get_confidence", timeout=2)
        return res.status_code == 200
    except requests.RequestException:
        # Narrowed from a bare `except:` -- only network/HTTP failures mean
        # "backend offline"; anything else should surface normally.
        return False


backend_active = check_backend()

if not backend_active:
    st.error("⚠️ Backend API is offline!")
    st.info("Please start the backend server first (e.g., `python main.py`) to unlock AI features.")
    if st.button("🔄 Retry Connection"):
        st.rerun()
    st.stop()  # Stops execution here, effectively disabling the app

# --- 2. Main Page (Only rendered if backend is active) ---
st.title("🌴 Palm Oil FFB Management System")
st.markdown("### Production-Ready AI Analysis & Archival")

# --- Sidebar ---
st.sidebar.header("Backend Controls")


def update_confidence() -> None:
    """on_change callback: push the slider value to the backend threshold."""
    new_conf = st.session_state.conf_slider
    try:
        requests.post(
            f"{API_BASE_URL}/set_confidence",
            json={"threshold": new_conf},
            timeout=5,
        )
        st.toast(f"Threshold updated to {new_conf}")
    except requests.RequestException:
        st.sidebar.error("Failed to update threshold")


# The backend passed check_backend() above, but it may have gone down since
# (time-of-check vs time-of-use) -- fall back to the default threshold
# instead of crashing the whole page.
try:
    response = requests.get(f"{API_BASE_URL}/get_confidence", timeout=2)
    current_conf = response.json().get("current_confidence", 0.25)
except (requests.RequestException, ValueError):
    current_conf = 0.25

st.sidebar.success("Connected to API")

# Slider is synchronized with the backend's current threshold.
st.sidebar.slider(
    "Confidence Threshold",
    0.1,
    1.0,
    value=float(current_conf),
    key="conf_slider",
    on_change=update_confidence,
)

# --- Tabs ---
tab1, tab2, tab3 = st.tabs(["Single Analysis", "Batch Processing", "Similarity Search"])

# --- Tab 1: Single Analysis ---
with tab1:
    st.subheader("Analyze Single Bunch")
    uploaded_file = st.file_uploader(
        "Upload a bunch image...", type=["jpg", "jpeg", "png"], key="single"
    )

    if uploaded_file:
        col1, col2 = st.columns(2)
        with col1:
            st.image(uploaded_file, caption="Input", width=500)
        with col2:
            if st.button("Run Full Analysis"):
                with st.spinner("Processing... (Detecting + Vectorizing + Archiving)"):
                    files = {
                        "file": (uploaded_file.name, uploaded_file.getvalue(), uploaded_file.type)
                    }
                    # No timeout: detection + vectorization can legitimately be slow.
                    res = requests.post(f"{API_BASE_URL}/analyze", files=files)
                    if res.status_code == 200:
                        data = res.json()
                        st.success(f"✅ Record Archived! ID: {data['record_id']}")
                        for det in data['detections']:
                            st.info(f"**{det['class']}** - {det['confidence']:.2%} confidence")
                    else:
                        st.error(f"Analysis Failed: {res.text}")

# --- Tab 2: Batch Processing ---
with tab2:
    st.subheader("Bulk Analysis")

    # 1. Initialize Session State
    # batch_uploader_key is bumped to force Streamlit to recreate (and thus
    # clear) the file_uploader widget; last_batch_results persists results
    # across reruns.
    if "batch_uploader_key" not in st.session_state:
        st.session_state.batch_uploader_key = 0
    if "last_batch_results" not in st.session_state:
        st.session_state.last_batch_results = None

    # 2. Display Persisted Results (if any)
    if st.session_state.last_batch_results:
        res_data = st.session_state.last_batch_results
        with st.container(border=True):
            st.success(f"✅ Successfully processed {res_data['processed_count']} images.")
            st.write("Generated Record IDs:")
            st.code(res_data['record_ids'])
            if st.button("Clear Results & Start New Batch"):
                st.session_state.last_batch_results = None
                st.rerun()
        st.divider()

    # 3. Uploader UI
    col_batch1, col_batch2 = st.columns([4, 1])
    with col_batch1:
        uploaded_files = st.file_uploader(
            "Upload multiple images...",
            type=["jpg", "jpeg", "png"],
            accept_multiple_files=True,
            key=f"batch_{st.session_state.batch_uploader_key}",
        )
    with col_batch2:
        st.write("##")  # Alignment
        if st.button("🗑️ Reset Uploader"):
            st.session_state.batch_uploader_key += 1
            st.rerun()

    if uploaded_files:
        if st.button(f"🚀 Process {len(uploaded_files)} Images"):
            with st.spinner("Batch Processing in progress..."):
                files = [("files", (f.name, f.getvalue(), f.type)) for f in uploaded_files]
                res = requests.post(f"{API_BASE_URL}/analyze_batch", files=files)
                if res.status_code == 200:
                    # 4. Success: Store results and Clear Uploader automatically
                    st.session_state.last_batch_results = res.json()
                    st.session_state.batch_uploader_key += 1
                    st.rerun()
                else:
                    st.error(f"Batch Failed: {res.text}")

# --- Tab 3: Similarity Search ---
with tab3:
    st.subheader("Hybrid Semantic Search")
    st.markdown("Search records by either **Image Similarity** or **Natural Language Query**.")

    with st.form("hybrid_search_form"):
        col_input1, col_input2 = st.columns(2)
        with col_input1:
            search_file = st.file_uploader(
                "Option A: Search Image...", type=["jpg", "jpeg", "png"], key="search"
            )
        with col_input2:
            text_query = st.text_input(
                "Option B: Natural Language Query",
                placeholder="e.g., 'ripe bunches with dark spots' or 'unripe fruit'",
            )
        top_k = st.slider("Results Limit (Top K)", 1, 20, 3)
        submit_search = st.form_submit_button("Run Semantic Search")

    if submit_search:
        if not search_file and not text_query:
            st.warning("Please provide either an image or a text query.")
        else:
            with st.spinner("Searching Vector Index..."):
                payload = {"limit": top_k}

                # If an image is uploaded, it takes precedence for visual search
                if search_file:
                    files = {
                        "file": (search_file.name, search_file.getvalue(), search_file.type)
                    }
                    # Pass top_k as part of the data
                    res = requests.post(
                        f"{API_BASE_URL}/search_hybrid", files=files, data=payload
                    )
                # Otherwise, use text query
                else:
                    payload["text_query"] = text_query
                    # Send as form-data (data=) to match FastAPI's Form(None)
                    res = requests.post(f"{API_BASE_URL}/search_hybrid", data=payload)

                if res.status_code == 200:
                    results = res.json().get("results", [])
                    if not results:
                        st.warning("No similar records found.")
                    else:
                        st.success(f"Found {len(results)} matches.")
                        for item in results:
                            with st.container(border=True):
                                c1, c2 = st.columns([1, 2])

                                # Fetch the image for this result
                                rec_id = item["_id"]
                                img_res = requests.get(f"{API_BASE_URL}/get_image/{rec_id}")

                                with c1:
                                    if img_res.status_code == 200:
                                        img_b64 = img_res.json().get("image_data")
                                        if img_b64:
                                            st.image(base64.b64decode(img_b64), width=250)
                                        else:
                                            st.write("No image data found.")
                                    else:
                                        st.write("Failed to load image.")

                                with c2:
                                    st.write(f"**Class:** {item['ripeness_class']}")
                                    st.write(f"**Similarity Score:** {item['score']:.4f}")
                                    st.write(f"**Timestamp:** {item['timestamp']}")
                                    st.write(f"**ID:** `{rec_id}`")
                else:
                    st.error(f"Search failed: {res.text}")