|
|
@@ -466,7 +466,13 @@ def generate_batch_report(data, uploaded_files_map=None):
|
|
|
|
|
|
|
|
|
# --- Tabs ---
|
|
|
-tab1, tab2, tab3, tab4 = st.tabs(["Single Analysis", "Batch Processing", "Similarity Search", "History Vault"])
|
|
|
+tab1, tab2, tab3, tab4, tab5 = st.tabs([
|
|
|
+    "Single Analysis",
|
|
|
+    "Batch Processing",
|
|
|
+    "Similarity Search",
|
|
|
+    "History Vault",
|
|
|
+    "Batch Reviewer"  # Tab 5 (added by this patch): offline review of locally generated batch outputs
|
|
|
+])
|
|
|
|
|
|
# --- Tab 1: Single Analysis ---
|
|
|
with tab1:
|
|
|
@@ -947,3 +953,93 @@ with tab4:
|
|
|
st.error(f"Vault Connection Failed: {res.text}")
|
|
|
except Exception as e:
|
|
|
st.error(f"Audit System Error: {str(e)}")
|
|
|
+
|
|
|
+# --- Tab 5: Batch Reviewer ---
|
|
|
+with tab5:
|
|
|
+    st.subheader("📦 Local Batch Reviewer")
|
|
|
+    st.caption("Provide a local directory path to review the AI Data Contract and evidence.")
|
|
|
+
|
|
|
+    # 1. Path Input
|
|
|
+    batch_path = st.text_input(
|
|
|
+        "Enter Batch Folder Path:",
|
|
|
+        placeholder="e.g., batch_outputs/BATCH_2646CB27",
|
|
|
+        help="The folder should contain 'manifest.json' and a 'raw' subfolder."
|
|
|
+    )
|
|
|
+
|
|
|
+    if batch_path:
|
|
|
+        manifest_path = os.path.join(batch_path, "manifest.json")  # the batch's AI Data Contract
|
|
|
+        raw_dir = os.path.join(batch_path, "raw")  # folder of source images referenced by the manifest
|
|
|
+
|
|
|
+        # 2. Validation: fail fast with a precise message before attempting to parse anything
|
|
|
+        if not os.path.exists(manifest_path):
|
|
|
+            st.error(f"❌ Could not find `manifest.json` at: `{manifest_path}`")
|
|
|
+        elif not os.path.exists(raw_dir):
|
|
|
+            st.error(f"❌ Could not find `raw` folder at: `{raw_dir}`")
|
|
|
+        else:
|
|
|
+            # 3. Load the Contract (manifest.json describes the whole batch)
|
|
|
+            try:
|
|
|
+                with open(manifest_path, 'r', encoding='utf-8') as f:  # JSON is UTF-8 by spec; don't rely on the locale default
|
|
|
+                    manifest = json.load(f)
|
|
|
+
|
|
|
+                # --- Batch Header: Metadata Audit ---
|
|
|
+                with st.container(border=True):
|
|
|
+                    c1, c2, c3 = st.columns(3)
|
|
|
+                    with c1:
|
|
|
+                        st.metric("Batch ID", manifest['job_id'])  # required key; a malformed manifest surfaces in the outer except
|
|
|
+                    with c2:
|
|
|
+                        ctx = manifest.get('source_context', {})
|
|
|
+                        st.write(f"**Venue:** {ctx.get('estate', 'N/A')}")
|
|
|
+                        st.write(f"**Block:** {ctx.get('block', 'N/A')}")  # fixed: fallback was hard-coded 'B12', which fabricated data when the key was missing; use 'N/A' like the sibling field
|
|
|
+                    with c3:
|
|
|
+                        eng = manifest.get('engine', {})
|
|
|
+                        st.write(f"**AI Engine:** {eng.get('name')} ({eng.get('type')})")
|
|
|
+                        st.write(f"**Threshold:** {eng.get('threshold')}")
|
|
|
+
|
|
|
+                st.divider()
|
|
|
+
|
|
|
+                # --- Inventory Review ---
|
|
|
+                st.write("### 📂 Production Inventory")
|
|
|
+                for item in manifest['inventory']:
|
|
|
+                    fname = item['filename']
|
|
|
+                    img_full_path = os.path.join(raw_dir, fname)
|
|
|
+
|
|
|
+                    if os.path.exists(img_full_path):
|
|
|
+                        with st.expander(f"🖼️ {fname}", expanded=False):
|
|
|
+                            img = Image.open(img_full_path).convert("RGB")  # NOTE(review): file handle is left to GC after convert(); acceptable here
|
|
|
+                            width, height = img.size
|
|
|
+
|
|
|
+                            # --- Coordinate Remapping Engine ---
|
|
|
+                            # We use 'norm_box' to remain resolution-agnostic for the subscriber
|
|
|
+                            remapped_detections = []
|
|
|
+                            for d in item['detections']:
|
|
|
+                                nx1, ny1, nx2, ny2 = d['norm_box']  # presumably [0,1] ratios — confirm against the batch producer
|
|
|
+                                remapped_detections.append({
|
|
|
+                                    **d,
|
|
|
+                                    # Map ratios back to absolute pixels of the loaded image
|
|
|
+                                    "box": [nx1 * width, ny1 * height, nx2 * width, ny2 * height]
|
|
|
+                                })
|
|
|
+
|
|
|
+                            # --- Side-by-Side Review ---
|
|
|
+                            v_col1, v_col2 = st.columns([2, 1])
|
|
|
+                            with v_col1:
|
|
|
+                                # Reuse high-performance interactive viewer
|
|
|
+                                display_interactive_results(img, remapped_detections, key=f"rev_{item['image_id']}")
|
|
|
+
|
|
|
+                            with v_col2:
|
|
|
+                                st.write("#### 📡 Subscriber Payload")
|
|
|
+                                st.info("Clean metadata ready for hand-off to ERP or Vectorization.")
|
|
|
+                                # Extract non-geometric business data
|
|
|
+                                payload = [{
|
|
|
+                                    "id": det['bunch_id'],
|
|
|
+                                    "grade": det['class'],
|
|
|
+                                    "score": det['confidence'],
|
|
|
+                                    "alert": det['is_health_alert']
|
|
|
+                                } for det in remapped_detections]
|
|
|
+                                st.json(payload)
|
|
|
+
|
|
|
+                            if st.button(f"🚀 Vectorize Image {item['image_id']}", key=f"btn_{item['image_id']}"):  # key must be unique per image to avoid duplicate-widget errors
|
|
|
+                                st.toast(f"Broadcasting data for {fname} to remote subscribers...")
|
|
|
+                    else:
|
|
|
+                        st.warning(f"⚠️ Image missing from /raw folder: `{fname}`")
|
|
|
+            except Exception as e:  # boundary handler: malformed JSON / missing required keys surface here
|
|
|
+                st.error(f"Failed to load batch: {e}")
|