"""Streamlit front-end for the Palm Oil FFB (Fresh Fruit Bunch) management system.

Talks to a local backend API (detection, batch analysis, archival, and
vector similarity search) at ``API_BASE_URL``.
"""
import streamlit as st
import requests
# NOTE(review): the three imports below are never used in this file; kept to
# avoid breaking anything outside this view — confirm before removing.
from PIL import Image
import io
import base64

# --- API Configuration ---
# Base URL of the analysis backend; all endpoints below are relative to it.
API_BASE_URL = "http://localhost:8000"

# --- Page Config ---
st.set_page_config(page_title="Palm Oil Ripeness AI", layout="wide")
st.title("🌴 Palm Oil FFB Management System")
st.markdown("### Production-Ready AI Analysis & Archival")

# --- Sidebar ---
st.sidebar.header("Backend Controls")
def update_confidence():
    """Push the slider's new confidence threshold to the backend.

    Registered as the ``on_change`` callback of the sidebar slider, so the
    new value is read from ``st.session_state.conf_slider`` (the slider's
    ``key``). Shows a toast on success and a sidebar error on any failure.
    """
    new_conf = st.session_state.conf_slider
    try:
        resp = requests.post(
            f"{API_BASE_URL}/set_confidence",
            json={"threshold": new_conf},
            timeout=10,  # a dead backend must not hang the UI callback
        )
        # Original showed a success toast even on HTTP 4xx/5xx; treat those
        # as failures instead of silent success.
        resp.raise_for_status()
        st.toast(f"Threshold updated to {new_conf}")
    except requests.RequestException:  # narrow: network/HTTP errors only
        st.sidebar.error("Failed to update threshold")
# --- Sidebar: sync the confidence slider with the backend's current value ---
try:
    response = requests.get(f"{API_BASE_URL}/get_confidence", timeout=10)
    if response.status_code == 200:
        # Fall back to the backend's documented default when the key is absent.
        current_conf = response.json().get("current_confidence", 0.25)
        st.sidebar.success("Connected to API")  # was an f-string with no placeholders

        # Slider initialised from the backend value; edits are pushed back
        # through the on_change callback (update_confidence).
        st.sidebar.slider(
            "Confidence Threshold",
            0.1, 1.0,
            value=float(current_conf),
            key="conf_slider",
            on_change=update_confidence,
        )
    else:
        st.sidebar.error("API Error")
# ValueError covers a malformed JSON body / non-numeric threshold, which the
# original bare `except:` also (over-broadly) absorbed.
except (requests.RequestException, ValueError):
    st.sidebar.error("Could not connect to Backend API. Please ensure it is running.")

# --- Tabs ---
tab1, tab2, tab3 = st.tabs(["Single Analysis", "Batch Processing", "Similarity Search"])
# --- Tab 1: Single Analysis ---
# Upload one image, send it to /analyze, and show the archived record ID
# plus per-detection class/confidence lines.
with tab1:
    st.subheader("Analyze Single Bunch")
    uploaded_file = st.file_uploader(
        "Upload a bunch image...", type=["jpg", "jpeg", "png"], key="single"
    )

    if uploaded_file:
        col1, col2 = st.columns(2)
        with col1:
            st.image(uploaded_file, caption="Input", use_container_width=True)

        with col2:
            if st.button("Run Full Analysis"):
                with st.spinner("Processing... (Detecting + Vectorizing + Archiving)"):
                    # Forward the raw upload bytes unchanged to the backend.
                    files = {"file": (uploaded_file.name, uploaded_file.getvalue(), uploaded_file.type)}
                    try:
                        # Generous timeout: detection + vectorizing can be slow,
                        # but the request must not hang forever.
                        res = requests.post(f"{API_BASE_URL}/analyze", files=files, timeout=120)
                    except requests.RequestException as exc:
                        # Original let connection errors crash the script run.
                        st.error(f"Analysis Failed: {exc}")
                    else:
                        if res.status_code == 200:
                            data = res.json()
                            st.success(f"✅ Record Archived! ID: {data['record_id']}")
                            for det in data['detections']:
                                st.info(f"**{det['class']}** - {det['confidence']:.2%} confidence")
                        else:
                            st.error(f"Analysis Failed: {res.text}")
# --- Tab 2: Batch Processing ---
# Upload several images at once and send them in a single multipart request
# to /analyze_batch; show the count and generated record IDs.
with tab2:
    st.subheader("Bulk Analysis")
    uploaded_files = st.file_uploader(
        "Upload multiple images...",
        type=["jpg", "jpeg", "png"],
        accept_multiple_files=True,
        key="batch",
    )

    if uploaded_files:
        if st.button(f"Process {len(uploaded_files)} Images"):
            with st.spinner("Batch Processing in progress..."):
                # Repeated "files" field name -> list-of-tuples multipart form.
                files = [("files", (f.name, f.getvalue(), f.type)) for f in uploaded_files]
                try:
                    # Long timeout scaled for multi-image processing; still bounded.
                    res = requests.post(f"{API_BASE_URL}/analyze_batch", files=files, timeout=300)
                except requests.RequestException:
                    # Original let connection errors crash the script run.
                    st.error("Batch Failed")
                else:
                    if res.status_code == 200:
                        data = res.json()
                        st.success(f"Successfully processed {data['processed_count']} images.")
                        st.write("Generated Record IDs:")
                        st.code(data['record_ids'])
                    else:
                        st.error("Batch Failed")
# --- Tab 3: Similarity Search ---
# Upload a query image, POST it to /search, and render the returned
# nearest-neighbour records (class, score, timestamp) from the vector index.
with tab3:
    st.subheader("Atlas Vector Search")
    st.markdown("Upload an image to find the most similar historical records in the database.")
    search_file = st.file_uploader("Search Image...", type=["jpg", "jpeg", "png"], key="search")

    if search_file:
        st.image(search_file, width=300)
        if st.button("Find Similar Bunches"):
            with st.spinner("Searching Vector Index..."):
                files = {"file": (search_file.name, search_file.getvalue(), search_file.type)}
                try:
                    res = requests.post(f"{API_BASE_URL}/search", files=files, timeout=60)
                except requests.RequestException:
                    # Original let connection errors crash the script run.
                    st.error("Search failed")
                else:
                    if res.status_code == 200:
                        results = res.json().get("results", [])
                        if not results:
                            st.warning("No similar records found.")
                        else:
                            for item in results:
                                with st.container(border=True):
                                    # c1 is a reserved image column, intentionally
                                    # left empty in this demo (no image fetch by id).
                                    c1, c2 = st.columns([1, 2])
                                    # Note: Actual prod app would fetch image_data by id here
                                    # For demo, we show the textual metadata
                                    with c2:
                                        st.write(f"**Class:** {item['ripeness_class']}")
                                        st.write(f"**Similarity Score:** {item['score']:.4f}")
                                        st.write(f"**Timestamp:** {item['timestamp']}")
                    else:
                        st.error("Search failed")