Dr-Swopt 3 gün önce
ebeveyn
işleme
385a4a4eab
2 değiştirilmiş dosya ile 53 ekleme ve 9 silme
  1. +51 −7
      demo_app.py
  2. +2 −2
      src/api/main.py

+ 51 - 7
demo_app.py

@@ -14,6 +14,31 @@ from datetime import datetime
 from fpdf import FPDF
 
 
+@st.dialog("📘 AI Interpretation Guide")
+def show_tech_guide():
+    st.write("### 🎯 What does 'Confidence' mean?")
+    st.write("""
+    This is a probability score from **0.0 to 1.0**. 
+    - **0.90+**: The AI is nearly certain this is a bunch of this grade.
+    - **0.25 (Threshold)**: We ignore anything below this to filter out 'ghost' detections or background noise.
+    """)
+    
+    st.write("### 🛠️ The Raw Mathematical Tensor")
+    st.write("The AI returns a raw array of shape `[1, 300, 6]`. Here is the key:")
+    st.table({
+        "Index": ["0-3", "4", "5"],
+        "Meaning": ["Coordinates (x1, y1, x2, y2)", "Confidence Score", "Class ID (0-5)"],
+        "Reality": ["The 'Box' in the image.", "The AI's certainty.", "The Ripeness Grade."]
+    })
+    
+    st.write("### ⚡ Inference vs. Processing Time")
+    st.write("""
+    - **Inference Speed**: The time the AI model took to 'think' about the pixels.
+    - **Total Time**: Includes image uploading and database saving overhead.
+    """)
+    st.info("💡 **Engine Note**: ONNX is optimized for latency (~39ms), while PyTorch offers native indicator flexibility.")
+
+
 # --- 1. Global Backend Check ---
 API_BASE_URL = "http://localhost:8000"
 
@@ -81,6 +106,10 @@ if model_type == "pytorch":
 else:
     st.sidebar.info("ONNX Engine: ~39ms Latency")
 
+st.sidebar.markdown("---")
+if st.sidebar.button("❓ How to read results?", icon="📘", width='stretch'):
+    show_tech_guide()
+
 # Helper to reset results when files change
 def reset_single_results():
     st.session_state.last_detection = None
@@ -136,7 +165,7 @@ def display_interactive_results(image, detections, key=None):
         ))
 
     fig.update_layout(width=800, height=600, margin=dict(l=0, r=0, b=0, t=0), showlegend=False)
-    st.plotly_chart(fig, use_container_width=True, key=key)
+    st.plotly_chart(fig, width='stretch', key=key)
 
 def annotate_image(image, detections):
     """Draws high-visibility boxes and background-shaded labels."""
@@ -349,8 +378,15 @@ with tab1:
             col1, col2 = st.columns([1.5, 1]) # Keep original col structure for summary below
             
             with col1:
-                with st.expander("🛠️ Technical Evidence: Raw Output Tensor", expanded=False):
-                    st.write("First 5 detections from raw output tensor:")
+                col_tech_h1, col_tech_h2 = st.columns([4, 1])
+                with col_tech_h1:
+                    st.write("#### 🛠️ Technical Evidence")
+                with col_tech_h2:
+                    if st.button("❓ Guide", key="guide_tab1"):
+                        show_tech_guide()
+                
+                with st.expander("Raw Output Tensor (NMS-Free)", expanded=False):
+                    st.caption("See the Interpretation Guide for a breakdown of these numbers.")
                     st.json(data.get('raw_array_sample', []))
                 with st.container(border=True):
                     st.write("### 🏷️ Detection Results")
@@ -514,7 +550,7 @@ with tab2:
                         with st.container(border=True):
                             g_img = Image.open(up_file).convert("RGB")
                             g_annotated = annotate_image(g_img, gallery_map[up_file.name])
-                            st.image(g_annotated, caption=f"Evidence: {up_file.name}", use_container_width=True)
+                            st.image(g_annotated, caption=f"Evidence: {up_file.name}", width='stretch')
 
 
             # PDF Export Button (Pass images map)
@@ -657,7 +693,7 @@ with tab4:
                     # ListView Mode
                     st.write("### 📋 Record List")
                     df_history = pd.DataFrame(history_data)[['id', 'filename', 'timestamp', 'inference_ms']]
-                    st.dataframe(df_history, hide_index=True, use_container_width=True)
+                    st.dataframe(df_history, hide_index=True, width='stretch')
                     
                     id_to_select = st.number_input("Enter Record ID to view details:", min_value=int(df_history['id'].min()), max_value=int(df_history['id'].max()), step=1)
                     if st.button("Deep Dive Analysis", type="primary"):
@@ -701,8 +737,16 @@ with tab4:
                             st.error(f"Archive file not found: {record['archive_path']}")
                         
                         # Technical Evidence Expander
-                        with st.expander("🛠️ Technical Evidence: Raw Output Tensor"):
-                            raw_data = record.get('raw_array_sample')
+                        col_hist_tech1, col_hist_tech2 = st.columns([4, 1])
+                        with col_hist_tech1:
+                            st.write("#### 🛠️ Technical Evidence")
+                        with col_hist_tech2:
+                            if st.button("❓ Guide", key="guide_hist"):
+                                show_tech_guide()
+
+                        with st.expander("Raw Output Tensor (Archive)", expanded=False):
+                            st.caption("See the Interpretation Guide for a breakdown of these numbers.")
+                            raw_data = record.get('raw_tensor')
                             if raw_data:
                                 try:
                                     st.json(json.loads(raw_data))

+ 2 - 2
src/api/main.py

@@ -32,7 +32,7 @@ def init_local_db():
             detections TEXT,
             summary TEXT,
             inference_ms REAL,
-            raw_array_sample TEXT,
+            raw_tensor TEXT,
             timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
         )
     ''')
@@ -184,7 +184,7 @@ async def analyze_with_health_metrics(file: UploadFile = File(...), model_type:
     # Save to SQLite
     conn = sqlite3.connect(DB_PATH)
     cursor = conn.cursor()
-    cursor.execute("INSERT INTO history (filename, archive_path, detections, summary, inference_ms, raw_array_sample) VALUES (?, ?, ?, ?, ?, ?)",
+    cursor.execute("INSERT INTO history (filename, archive_path, detections, summary, inference_ms, raw_tensor) VALUES (?, ?, ?, ?, ?, ?)",
                    (file.filename, archive_path, json.dumps(detections), json.dumps(summary), inference_ms, json.dumps(raw_sample)))
     conn.commit()
     conn.close()