|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import streamlit as st |
|
|
st.set_page_config(page_title="GeoMate V2", page_icon="🌍", layout="wide", initial_sidebar_state="expanded") |
|
|
|
|
|
|
|
|
import os |
|
|
import io |
|
|
import json |
|
|
import time |
|
|
import math |
|
|
import base64 |
|
|
import textwrap |
|
|
from typing import Any, Dict, List, Optional, Tuple |
|
|
|
|
|
|
|
|
from streamlit_option_menu import option_menu |
|
|
import matplotlib.pyplot as plt |
|
|
|
|
|
|
|
|
try: |
|
|
import faiss |
|
|
except Exception: |
|
|
faiss = None |
|
|
|
|
|
try: |
|
|
import reportlab |
|
|
from reportlab.lib import colors |
|
|
from reportlab.lib.pagesizes import A4 |
|
|
from reportlab.lib.units import mm |
|
|
from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, Table, TableStyle, PageBreak, Flowable |
|
|
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle |
|
|
except Exception: |
|
|
reportlab = None |
|
|
|
|
|
try: |
|
|
import geemap |
|
|
import ee |
|
|
except Exception: |
|
|
geemap = None |
|
|
ee = None |
|
|
|
|
|
try: |
|
|
import easyocr |
|
|
except Exception: |
|
|
easyocr = None |
|
|
|
|
|
|
|
|
try: |
|
|
from groq import Groq |
|
|
except Exception: |
|
|
Groq = None |
|
|
|
|
|
|
|
|
try: |
|
|
from sentence_transformers import SentenceTransformer |
|
|
except Exception: |
|
|
SentenceTransformer = None |
|
|
|
|
|
|
|
|
|
|
|
def _get_env_secret(key: str) -> Optional[str]: |
|
|
|
|
|
val = os.environ.get(key) |
|
|
if val: |
|
|
return val |
|
|
try: |
|
|
return st.secrets.get(key) |
|
|
except Exception: |
|
|
return None |
|
|
|
|
|
|
|
|
REQUIRED_SECRETS = ["GROQ_API_KEY", "SERVICE_ACCOUNT", "EARTH_ENGINE_KEY"] |
|
|
|
|
|
missing = [] |
|
|
for sname in REQUIRED_SECRETS: |
|
|
if not _get_env_secret(sname): |
|
|
missing.append(sname) |
|
|
|
|
|
|
|
|
if missing: |
|
|
st.markdown( |
|
|
""" |
|
|
<style> |
|
|
.secret-error { background: #200; border-left: 6px solid #FF7A00; padding: 12px; border-radius:8px; } |
|
|
</style> |
|
|
""", unsafe_allow_html=True |
|
|
) |
|
|
st.error( |
|
|
f"Missing required secrets: {', '.join(missing)}. " |
|
|
"Please add them in your Hugging Face Space Secrets or in environment variables and restart the app." |
|
|
) |
|
|
st.stop() |
|
|
|
|
|
|
|
|
GROQ_API_KEY = _get_env_secret("GROQ_API_KEY") |
|
|
SERVICE_ACCOUNT = _get_env_secret("SERVICE_ACCOUNT") |
|
|
EARTH_ENGINE_KEY = _get_env_secret("EARTH_ENGINE_KEY") |
|
|
|
|
|
|
|
|
def groq_client(): |
|
|
if Groq is None: |
|
|
raise RuntimeError("groq package not installed. Add 'groq' to requirements.txt.") |
|
|
return Groq(api_key=GROQ_API_KEY) |
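

def groq_chat(prompt: str) -> str:
    """Minimal chat helper (a sketch): later parts call groq_chat() but never
    define it. Assumes the standard Groq chat-completions API; the model name
    is read from the sidebar selection with a fallback default."""
    client = groq_client()
    resp = client.chat.completions.create(
        model=st.session_state.get("llm_model", "llama3-8b-8192"),
        messages=[{"role": "user", "content": prompt}],
    )
    return resp.choices[0].message.content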
|
|
|
|
|
|
|
|
ss = st.session_state |
|
|
|
|
|
|
|
|
if "page" not in ss: |
|
|
ss.page = "Landing" |
|
|
if "llm_model" not in ss: |
|
|
|
|
|
ss.llm_model = "meta-llama/llama-4-maverick-17b-128e-instruct" |
|
|
if "sites" not in ss: |
|
|
|
|
|
ss.sites = [] |
|
|
if "active_site_idx" not in ss: |
|
|
ss.active_site_idx = 0 |
|
|
if "faiss_loaded" not in ss: |
|
|
ss.faiss_loaded = False |
|
|
if "faiss_index" not in ss: |
|
|
ss.faiss_index = None |
|
|
if "faiss_meta" not in ss: |
|
|
ss.faiss_meta = None |
|
|
|
|
|
|
|
|
THEME = { |
|
|
"bg": "#060606", |
|
|
"panel": "#0b0b0b", |
|
|
"accent": "#FF7A00", |
|
|
"accent2": "#C62828", |
|
|
"blue": "#1F4E79", |
|
|
"muted": "#9aa7bf", |
|
|
"bubble_bg": "#0f1724", |
|
|
} |
|
|
|
|
|
|
|
|
MAX_SITES = 4 |
|
|
|
|
|
|
|
|
def new_empty_site(name: str = "Site") -> Dict[str, Any]: |
|
|
"""Create a new site dict with all required fields pre-populated as None or sensible defaults.""" |
|
|
return { |
|
|
"Site Name": name, |
|
|
"Site Coordinates": "", |
|
|
"lat": None, |
|
|
"lon": None, |
|
|
"Load Bearing Capacity": None, |
|
|
"Skin Shear Strength": None, |
|
|
"Relative Compaction": None, |
|
|
"Rate of Consolidation": None, |
|
|
"Nature of Construction": None, |
|
|
"Soil Profile": None, |
|
|
"Flood Data": None, |
|
|
"Seismic Data": None, |
|
|
"Topography": None, |
|
|
"GSD": None, |
|
|
"USCS": None, |
|
|
"AASHTO": None, |
|
|
"GI": None, |
|
|
"classifier_inputs": {}, |
|
|
"classifier_decision_path": "", |
|
|
"chat_history": [], |
|
|
"report_convo_state": 0, |
|
|
"map_snapshot": None, |
|
|
"ocr_pending": False, |
|
|
} |
|
|
|
|
|
def get_active_site() -> Dict[str, Any]: |
|
|
"""Return the active site dict. If none exists, create one.""" |
|
|
if not ss.sites: |
|
|
ss.sites = [new_empty_site("Home")] |
|
|
ss.active_site_idx = 0 |
|
|
|
|
|
if ss.active_site_idx < 0: |
|
|
ss.active_site_idx = 0 |
|
|
if ss.active_site_idx >= len(ss.sites): |
|
|
ss.active_site_idx = max(0, len(ss.sites) - 1) |
|
|
return ss.sites[ss.active_site_idx] |
|
|
|
|
|
def save_active_site(site_dict: Dict[str, Any]): |
|
|
"""Save the given dict into the active site slot.""" |
|
|
if not ss.sites: |
|
|
ss.sites = [site_dict] |
|
|
ss.active_site_idx = 0 |
|
|
else: |
|
|
ss.sites[ss.active_site_idx] = site_dict |
|
|
|
|
|
def add_site(name: str): |
|
|
if len(ss.sites) >= MAX_SITES: |
|
|
st.warning(f"Maximum of {MAX_SITES} sites reached.") |
|
|
return |
|
|
ss.sites.append(new_empty_site(name)) |
|
|
ss.active_site_idx = len(ss.sites) - 1 |
|
|
|
|
|
def remove_site(idx: int): |
|
|
if idx < 0 or idx >= len(ss.sites): |
|
|
return |
|
|
ss.sites.pop(idx) |
|
|
if ss.active_site_idx >= len(ss.sites): |
|
|
ss.active_site_idx = max(0, len(ss.sites) - 1) |
|
|
|
|
|
|
|
|
def pretty_site_json(site: Dict[str, Any]) -> str: |
|
|
try: |
|
|
return json.dumps(site, indent=2, default=str) |
|
|
except Exception: |
|
|
return str(site) |
|
|
|
|
|
|
|
|
def sidebar_ui(): |
|
|
with st.sidebar: |
|
|
st.markdown(f"<h3 style='color:{THEME['accent']};margin:6px 0;'>GeoMate V2</h3>", unsafe_allow_html=True) |
|
|
|
|
|
st.markdown("**Select LLM model**") |
|
|
model_choice = st.selectbox( |
|
|
"Model", |
|
|
options=[ |
|
|
"meta-llama/llama-4-maverick-17b-128e-instruct", |
|
|
"llama3-8b-8192", |
|
|
"mixtral-8x7b-32768", |
|
|
"gemma-7b-it" |
|
|
], |
|
|
index=0, |
|
|
key="sidebar_model_select" |
|
|
) |
|
|
ss.llm_model = model_choice |
|
|
|
|
|
st.markdown("---") |
|
|
st.markdown("**Project Sites**") |
|
|
|
|
|
colA, colB = st.columns([2,1]) |
|
|
with colA: |
|
|
|
|
|
new_site_name = st.text_input("New site name", key="new_site_name_input") |
|
|
with colB: |
|
|
if st.button("➕ Add", key="add_site_btn"): |
|
|
name = new_site_name.strip() or f"Site {len(ss.sites)+1}" |
|
|
add_site(name) |
|
|
st.success(f"Added site: {name}") |
|
|
st.rerun() |
|
|
|
|
|
|
|
|
if ss.sites: |
|
|
names = [s.get("Site Name", f"Site {i+1}") for i,s in enumerate(ss.sites)] |
|
|
asel = st.selectbox("Active Site", options=names, index=ss.active_site_idx, key="active_site_select") |
|
|
|
|
|
ss.active_site_idx = names.index(asel) |
|
|
|
|
|
if st.button("🗑️ Remove active site", key="remove_site_btn"): |
|
|
idx = ss.active_site_idx |
|
|
removed_name = ss.sites[idx].get("Site Name","Site") |
|
|
remove_site(idx) |
|
|
st.success(f"Removed site {removed_name}") |
|
|
st.rerun() |
|
|
else: |
|
|
st.info("No sites yet. Add one above.") |
|
|
|
|
|
st.markdown("---") |
|
|
|
|
|
with st.expander("Show active site JSON"): |
|
|
st.code(pretty_site_json(get_active_site()), language="json") |
|
|
|
|
|
st.markdown("---") |
|
|
|
|
|
pages = ["Landing", "Soil Recognizer", "Soil Classifier", "GSD Curve", "Locator", "GeoMate Ask", "Reports"] |
|
|
icons = ["house", "image", "flask", "bar-chart", "geo-alt", "robot", "file-earmark-text"] |
|
|
choice = option_menu( |
|
|
menu_title=None, |
|
|
options=pages, |
|
|
icons=icons, |
|
|
menu_icon="cast", |
|
|
default_index=pages.index(ss.page) if ss.page in pages else 0, |
|
|
orientation="vertical", |
|
|
styles={ |
|
|
"container": {"padding": "6px", "background-color": THEME["panel"]}, |
|
|
"icon": {"color": THEME["accent"], "font-size": "18px"}, |
|
|
"nav-link": {"font-size": "14px", "text-align": "left", "margin":"4px"}, |
|
|
"nav-link-selected": {"background-color": THEME["accent"], "color": "white"}, |
|
|
} |
|
|
) |
|
|
if choice != ss.page: |
|
|
ss.page = choice |
|
|
st.rerun() |
|
|
|
|
|
st.markdown("---") |
|
|
if st.button("Reset Session (keep secrets)"): |
|
|
for k in list(ss.keys()): |
|
|
if k not in ["page", "llm_model"]: |
|
|
del ss[k] |
|
|
|
|
|
ss.sites = [new_empty_site("Home")] |
|
|
ss.active_site_idx = 0 |
|
|
st.success("Session reset.") |
|
|
st.rerun() |
|
|
|
|
|
st.markdown(f"<div style='color:{THEME['muted']};font-size:12px;padding-top:6px'>GeoMate V2 — Streamlit • Multi-site • RAG + Groq</div>", unsafe_allow_html=True) |
|
|
|
|
|
|
|
|
def landing_ui(): |
|
|
st.markdown( |
|
|
f""" |
|
|
<style> |
|
|
.hero {{ |
|
|
background: linear-gradient(180deg, rgba(255,122,0,0.06), rgba(255,122,0,0.02)); |
|
|
border-radius: 12px; |
|
|
padding: 18px; |
|
|
border: 1px solid rgba(255,122,0,0.08); |
|
|
}} |
|
|
.globe {{ |
|
|
width:120px;height:120px;border-radius:999px; |
|
|
background: conic-gradient({THEME['accent']}, {THEME['accent2']}, {THEME['blue']}); |
|
|
box-shadow: 0 10px 40px rgba(0,0,0,0.6); |
|
|
display:inline-block;margin-right:18px; |
|
|
}} |
|
|
.cta {{ |
|
|
background: linear-gradient(90deg, {THEME['accent']}, {THEME['accent2']}); |
|
|
color: white;padding:10px 18px;border-radius:10px;border: none; |
|
|
}} |
|
|
</style> |
|
|
""" |
|
|
, unsafe_allow_html=True) |
|
|
|
|
|
col1, col2 = st.columns([2,1]) |
|
|
with col1: |
|
|
st.markdown("<div class='hero'>", unsafe_allow_html=True) |
|
|
st.markdown("<div style='display:flex;align-items:center'>") |
|
|
st.markdown("<div class='globe'></div>", unsafe_allow_html=True) |
|
|
st.markdown("<div><h1 style='margin:0;color:#FF8C00'>GeoMate V2</h1><div style='color:#9aa7bf'>AI copilot for geotechnical engineering</div></div>", unsafe_allow_html=True) |
|
|
st.markdown("</div>") |
|
|
st.markdown("<hr/>", unsafe_allow_html=True) |
|
|
st.markdown(""" |
|
|
<ul> |
|
|
<li><b>Soil Recognizer:</b> Image-based soil detection (upload photos or use OCR).</li> |
|
|
<li><b>Classifier:</b> Verbatim USCS & AASHTO logic (chatbot style).</li> |
|
|
<li><b>Locator:</b> Draw AOI on map, fetch soil/flood/seismic/topography via Earth Engine.</li> |
|
|
<li><b>GeoMate Ask:</b> RAG-enabled LLM (FAISS + Groq) with session memory per site.</li> |
|
|
<li><b>Reports:</b> Classification-only & full Geotechnical PDF reports (styled).</li> |
|
|
</ul> |
|
|
""", unsafe_allow_html=True) |
|
|
st.markdown("</div>", unsafe_allow_html=True) |
|
|
|
|
|
st.markdown("### Quick actions") |
|
|
c1, c2, c3 = st.columns(3) |
|
|
if c1.button("🧪 Classifier"): |
|
|
ss.page = "Soil Classifier" |
|
|
st.rerun() |
|
|
if c2.button("📊 GSD Curve"): |
|
|
ss.page = "GSD Curve" |
|
|
st.rerun() |
|
|
if c3.button("🌍 Locator"): |
|
|
ss.page = "Locator" |
|
|
st.rerun() |
|
|
|
|
|
with col2: |
|
|
st.markdown("<div style='padding:12px;border-radius:10px;background:#06121a'>", unsafe_allow_html=True) |
|
|
active = get_active_site() |
|
|
st.markdown(f"<div style='font-size:16px;color:{THEME['accent']}'><b>Active site</b></div>", unsafe_allow_html=True) |
|
|
st.markdown(f"<div style='font-size:14px'>{active.get('Site Name','-')}</div>", unsafe_allow_html=True) |
|
|
st.markdown("<hr/>", unsafe_allow_html=True) |
|
|
st.markdown(f"<div style='color:{THEME['muted']};font-size:13px'>Sites configured: <b>{len(ss.sites)}</b></div>", unsafe_allow_html=True) |
|
|
st.markdown(f"<div style='color:{THEME['muted']};font-size:13px'>Saved classifications: <b>{len([s for s in ss.sites if s.get('USCS') or s.get('AASHTO')])}</b></div>", unsafe_allow_html=True) |
|
|
st.markdown("</div>", unsafe_allow_html=True) |
|
|
|
|
|
st.markdown("---") |
|
|
st.info("Tip: Use the sidebar to switch pages or the quick buttons above. All data is stored in this session (up to 4 sites).") |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def soil_recognizer_ui(): |
|
|
"""Image-based soil recognizer — placeholder here, implemented in Part 2.""" |
|
|
st.header("Soil Recognizer") |
|
|
st.info("Upload an image of soil. OCR and image model will extract features and suggest soil type. (Implemented in Part 2/3)") |
|
|
|
|
|
def soil_classifier_ui(): |
|
|
"""Soil classifier (chat-style). Full implementation continues in Part 2.""" |
|
|
st.header("Soil Classifier") |
|
|
st.info("Chat-style classifier will be displayed here. (Detailed implementation in Part 2)") |
|
|
|
|
|
def gsd_curve_ui(): |
|
|
"""GSD Curve page (upload data or enter diameters/passing). Implemented in Part 2.""" |
|
|
st.header("GSD Curve") |
|
|
st.info("Plot GSD curves, compute D10/D30/D60, Cu, Cc. (Detailed implementation in Part 2)") |
|
|
|
|
|
def locator_ui(): |
|
|
"""Locator page — interactive map and Earth Engine integration. Implemented in Part 3.""" |
|
|
st.header("Locator") |
|
|
st.info("Draw AOI, fetch soil, flood, seismic and topography data. (Implemented in Part 3)") |
|
|
|
|
|
def rag_ui(): |
|
|
"""GeoMate Ask — RAG Chatbot. Implemented in Part 4.""" |
|
|
st.header("GeoMate Ask (RAG + Groq)") |
|
|
st.info("RAG-based technical chatbot with memory per site. (Implemented in Part 4)") |
|
|
|
|
|
def reports_ui(): |
|
|
"""Reports UI: classification-only and full geotechnical report generator. Implemented in Part 4.""" |
|
|
st.header("Reports") |
|
|
st.info("Generate Classification-only or Full Geotechnical PDF reports. (Implemented in Part 4)") |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def main(): |
|
|
sidebar_ui() |
|
|
page = ss.page if hasattr(ss, "page") else "Landing" |
|
|
|
|
|
if page == "Landing": |
|
|
landing_ui() |
|
|
elif page == "Soil Recognizer": |
|
|
soil_recognizer_ui() |
|
|
elif page == "Soil Classifier": |
|
|
soil_classifier_ui() |
|
|
elif page == "GSD Curve": |
|
|
gsd_curve_ui() |
|
|
elif page == "Locator": |
|
|
locator_ui() |
|
|
elif page == "GeoMate Ask": |
|
|
rag_ui() |
|
|
elif page == "Reports": |
|
|
reports_ui() |
|
|
else: |
|
|
st.warning("Unknown page. Returning to Landing.") |
|
|
ss.page = "Landing" |
|
|
landing_ui() |
|
|
|
|
|
|
|
|
# NOTE: Parts 2-4 below redefine several of these pages and provide the final
# entry point, ui_main_final(), at the bottom of the file. Invoking main() here
# as well would render the sidebar twice and collide widget keys, so the Part 1
# entry point is intentionally disabled.
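
# ---- Helpers assumed by the recognizer below (not defined in the original paste) ----
# `mk` builds per-site widget keys; torch/torchvision are optional (the page falls
# back to a colour heuristic when no model file is present). numpy and PIL are
# needed by both paths.
import numpy as np
from PIL import Image

try:
    import torch
    import torchvision.transforms as T
except Exception:
    torch = None
    T = None


def mk(prefix: str, idx: int) -> str:
    """Unique Streamlit widget key scoped to the active site index."""
    return f"{prefix}_{idx}"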
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Renamed from soil_recognizer_page: the page dispatcher calls soil_recognizer_ui(),
# so this full implementation must carry that name to override the Part 1 placeholder.
def soil_recognizer_ui():
|
|
st.header("🖼️ Soil Recognizer") |
|
|
idx = st.session_state["active_site_idx"] |
|
|
|
|
|
st.write("Upload a soil sample photo. If a trained model is available, it will infer the soil class.") |
|
|
|
|
|
uploaded = st.file_uploader( |
|
|
"Upload sample photo", |
|
|
type=["png", "jpg", "jpeg"], |
|
|
key=mk("sr_upload", idx) |
|
|
) |
|
|
|
|
|
if uploaded: |
|
|
img = Image.open(uploaded).convert("RGB") |
|
|
st.image(img, use_column_width=True) |
|
|
|
|
|
if torch and os.path.exists("soil_best_model.pth"): |
|
|
st.info("✅ Model found — running inference (CPU).") |
|
|
|
|
|
try: |
|
|
|
|
|
model = torch.load("soil_best_model.pth", map_location="cpu") |
|
|
if hasattr(model, "eval"): |
|
|
model.eval() |
|
|
|
|
|
|
|
|
transform = T.Compose([ |
|
|
T.Resize((224, 224)), |
|
|
T.ToTensor(), |
|
|
T.Normalize([0.485, 0.456, 0.406], |
|
|
[0.229, 0.224, 0.225]) |
|
|
]) |
|
|
inp = transform(img).unsqueeze(0) |
|
|
|
|
|
with st.spinner("Running model..."): |
|
|
logits = model(inp) |
|
|
probs = torch.softmax(logits, dim=-1).detach().cpu().numpy()[0] |
|
|
|
|
|
labels = ["Sand", "Silt", "Clay", "Gravel", "Peat"] |
|
|
best = labels[int(np.argmax(probs))] |
|
|
conf = float(np.max(probs)) |
|
|
|
|
|
st.success(f"Predicted: **{best}** (confidence {conf:.2%})") |
|
|
|
|
|
if st.button("Save to site", key=mk("sr_save_btn", idx)): |
|
|
st.session_state["sites"][idx]["Soil Profile"] = best |
|
|
st.success("✅ Saved soil profile to site.") |
|
|
|
|
|
except Exception as e: |
|
|
st.error(f"❌ Model inference failed: {e}") |
|
|
|
|
|
else: |
|
|
|
|
|
st.warning("⚠️ No trained model file found — running heuristic fallback.") |
|
|
arr = np.array(img.resize((50, 50))).mean(axis=(0, 1)) |
|
|
r, g, b = arr |
|
|
if r > 120 and g > 110: |
|
|
pred = "Sand" |
|
|
else: |
|
|
pred = "Silt" |
|
|
|
|
|
st.success(f"Fallback prediction: **{pred}**") |
|
|
|
|
|
if st.button("Save fallback to site", key=mk("sr_save_fallback", idx)): |
|
|
st.session_state["sites"][idx]["Soil Profile"] = pred |
|
|
st.success("✅ Saved fallback result to site.") |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import pytesseract |
|
|
import tempfile |
|
|
from PIL import Image |
|
|
from typing import Dict, Any, Tuple |
|
|
|
|
|
|
|
|
def run_ocr_on_image(uploaded_file) -> Dict[str, float]: |
|
|
"""Run OCR on uploaded soil problem sheet to extract LL, PL, sieve %s.""" |
|
|
img = Image.open(uploaded_file).convert("L") |
|
|
text = pytesseract.image_to_string(img) |
|
|
extracted = {} |
|
|
|
|
|
|
|
|
    # Very rough line-based parsing; real lab sheets vary widely, so this is
    # best-effort. Sieve lines are matched before Atterberg limits so that a
    # line like "LL 40" is not mistaken for the #40 sieve (the original also
    # matched any bare "200"/"40", which caused false positives).
    def _first_number(line: str):
        for tok in line.split():
            if tok.replace('.', '', 1).isdigit():
                return float(tok)
        return None

    for line in text.splitlines():
        upper = line.upper()
        val = _first_number(line)
        if val is None:
            continue
        if "#200" in line or "NO. 200" in upper or "NO.200" in upper:
            extracted["P200"] = val
        elif "#40" in line or "NO. 40" in upper or "NO.40" in upper:
            extracted["P40"] = val
        elif "LL" in upper:
            extracted["LL"] = val
        elif "PL" in upper:
            extracted["PL"] = val
    return extracted
|
|
|
|
|
|
|
|
|
|
|
def classify_aashto(inputs: Dict[str, Any]) -> Tuple[str, str, str]: |
|
|
"""Full AASHTO logic + Group Index + explanation.""" |
|
|
from math import floor |
|
|
P2 = inputs.get("P200", 0.0) |
|
|
P4 = inputs.get("P40", 0.0) |
|
|
LL = inputs.get("LL", 0.0) |
|
|
PL = inputs.get("PL", 0.0) |
|
|
PI = LL - PL |
|
|
result = "A-0" |
|
|
desc = "" |
|
|
GI = 0 |
|
|
|
|
|
if P2 <= 35: |
|
|
if P2 <= 15 and P4 <= 30 and PI <= 6: |
|
|
P1 = inputs.get("P10", 0.0) |
|
|
if P1 <= 50: |
|
|
result = "A-1-a"; desc = "Granular soil, excellent subgrade." |
|
|
else: |
|
|
result = "A-1-b"; desc = "Granular soil with fines, still good subgrade." |
|
|
elif P2 <= 25 and P4 <= 50 and PI <= 6: |
|
|
result = "A-1-b"; desc = "Granular soil with more fines, fair performance." |
|
|
        elif P2 <= 10 and P4 >= 51 and PI == 0:
            # A-3: clean fine sands (>= 51% passing #40; non-plastic taken as PI == 0 here).
            # In the original, "A-3" sat in an unreachable else-branch because the
            # preceding `elif P2 <= 35` always matched inside this block.
            result = "A-3"; desc = "Clean fine sands, excellent highway subgrade."
        else:
            # A-2 subgroups: granular soils with silty/clayey fines.
            if LL <= 40 and PI <= 10: result = "A-2-4"; desc = "Granular soil with silt, fair subgrade."
            elif LL >= 41 and PI <= 10: result = "A-2-5"; desc = "Granular soil, high LL silt content."
            elif LL <= 40 and PI >= 11: result = "A-2-6"; desc = "Granular soil with clayey fines."
            else: result = "A-2-7"; desc = "Granular soil, poor clayey fines."
|
|
else: |
|
|
if LL <= 40 and PI <= 10: result = "A-4"; desc = "Silt, fair to poor subgrade." |
|
|
elif LL >= 41 and PI <= 10: result = "A-5"; desc = "Elastic silt, very poor subgrade." |
|
|
elif LL <= 40 and PI >= 11: result = "A-6"; desc = "Clay of low plasticity, poor subgrade." |
|
|
else: |
|
|
if PI <= (LL-30): result = "A-7-5"; desc = "Clay, high LL, fair plasticity." |
|
|
else: result = "A-7-6"; desc = "Clay, high plasticity, very poor subgrade." |
|
|
|
|
|
|
|
|
a = min(max(P2 - 35, 0), 40) |
|
|
b = min(max(P2 - 15, 0), 40) |
|
|
c = min(max(LL - 40, 0), 20) |
|
|
d = min(max(PI - 10, 0), 20) |
|
|
GI = floor(0.2*a + 0.005*a*c + 0.01*b*d) |
|
|
|
|
|
return result, desc, str(GI) |
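
# Worked Group Index example (a sketch): P200 = 60, LL = 50, PI = 25
#   a = min(max(60-35,0),40) = 25     b = min(max(60-15,0),40) = 40
#   c = min(max(50-40,0),20) = 10     d = min(max(25-10,0),20) = 15
#   GI = floor(0.2*25 + 0.005*25*10 + 0.01*40*15) = floor(5 + 1.25 + 6) = 12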
|
|
|
|
|
|
|
|
def classify_uscs(inputs: Dict[str, Any]) -> Tuple[str, str]: |
|
|
"""Full USCS logic with Cu, Cc, PI, DS/DIL/TG.""" |
|
|
P2 = inputs.get("P200", 0.0) |
|
|
if inputs.get("organic", False): |
|
|
return "Pt", "Peat / Organic soil — compressible, poor engineering properties." |
|
|
|
|
|
if P2 <= 50: |
|
|
P4 = inputs.get("P4", 0.0) |
|
|
D60, D30, D10 = inputs.get("D60", 0.0), inputs.get("D30", 0.0), inputs.get("D10", 0.0) |
|
|
LL, PL = inputs.get("LL", 0.0), inputs.get("PL", 0.0) |
|
|
PI = LL - PL |
|
|
Cu, Cc = 0, 0 |
|
|
if all([D60, D30, D10]): |
|
|
Cu = D60/D10 if D10 else 0 |
|
|
Cc = (D30**2)/(D10*D60) if D10*D60 else 0 |
|
|
|
|
|
        if P4 <= 50:
            # Gravels in this simplified tree (<= 50% of the sample passing #4).
            # NOTE: dual symbols (e.g. GW-GM) and the 5-12% fines band are not handled;
            # GP/SP were missing from the original and are added for clean soils
            # that fail the gradation test.
            if Cu >= 4 and 1 <= Cc <= 3: return "GW", "Well-graded gravel, excellent foundation material."
            elif P2 < 5 and all([D60, D30, D10]): return "GP", "Poorly graded gravel (clean, fails Cu/Cc criteria)."
            elif PI <= 7: return "GM", "Silty gravel, moderate quality."
            else: return "GC", "Clayey gravel, reduced drainage."
        else:
            if Cu >= 6 and 1 <= Cc <= 3: return "SW", "Well-graded sand, excellent engineering soil."
            elif P2 < 5 and all([D60, D30, D10]): return "SP", "Poorly graded sand (clean, fails Cu/Cc criteria)."
            elif PI <= 7: return "SM", "Silty sand, fair to moderate."
            else: return "SC", "Clayey sand, reduced strength."
|
|
else: |
|
|
LL, PL = inputs.get("LL", 0.0), inputs.get("PL", 0.0) |
|
|
PI = LL - PL |
|
|
        if LL < 50:
            # Simplified chart: the A-line and the CL-ML dual symbol (4 <= PI <= 7)
            # are not applied for low-LL soils here.
            if PI <= 7: return "ML", "Low plasticity silt."
            else: return "CL", "Low plasticity clay."
        else:
            if PI < 0.73*(LL-20): return "MH", "Elastic silt."
            else: return "CH", "High plasticity clay, compressible, weak foundation soil."
|
|
return "ML", "Default: Low plasticity silt." |
|
|
|
|
|
|
|
|
|
|
|
def soil_classifier_ui(): |
|
|
st.header("🤖 Soil Classifier (Chatbot + OCR + LLM)") |
|
|
site = get_active_site() |
|
|
|
|
|
if "classifier_state" not in site: |
|
|
site["classifier_state"] = 0 |
|
|
site["classifier_inputs"] = {} |
|
|
site["classifier_chat"] = [] |
|
|
|
|
|
chat = site["classifier_chat"] |
|
|
|
|
|
def add_bot(msg: str): |
|
|
chat.append(["bot", msg]) |
|
|
def add_user(msg: str): |
|
|
chat.append(["user", msg]) |
|
|
|
|
|
|
|
|
for role, msg in chat: |
|
|
bubble_color = THEME["bubble_bg"] if role=="bot" else "#1f2a44" |
|
|
border = f"2px solid {THEME['accent']}" if role=="bot" else "1px solid #333" |
|
|
st.markdown(f""" |
|
|
<div style='margin:6px 0;padding:8px 12px;background:{bubble_color}; |
|
|
border-radius:14px;border:{border};max-width:80%;'> |
|
|
<b>{'🤖' if role=='bot' else '👤'}:</b> {msg} |
|
|
</div> |
|
|
""", unsafe_allow_html=True) |
|
|
|
|
|
state = site["classifier_state"] |
|
|
inputs = site["classifier_inputs"] |
|
|
|
|
|
|
|
|
uploaded = st.file_uploader("📄 Upload soil test sheet (OCR)", type=["jpg","png","jpeg"]) |
|
|
if uploaded: |
|
|
ocr_data = run_ocr_on_image(uploaded) |
|
|
inputs.update(ocr_data) |
|
|
add_bot(f"OCR detected values: {ocr_data}") |
|
|
|
|
|
|
|
|
if state == 0 and not chat: |
|
|
add_bot("Hello 👋 I am GeoMate Soil Classifier. Let's begin. Is the soil organic (spongy, dark, odorous)? (y/n)") |
|
|
site["classifier_state"] = 1 |
|
|
save_active_site(site) |
|
|
|
|
|
|
|
|
user_in = st.text_input("Your answer:", key=f"classifier_input_{state}") |
|
|
if st.button("➡️", key=f"classifier_submit_{state}"): |
|
|
if user_in.strip(): |
|
|
add_user(user_in.strip()) |
|
|
|
|
|
|
|
|
if state == 1: |
|
|
if user_in.lower().startswith("y"): |
|
|
inputs["organic"] = True |
|
|
uscs, desc1 = classify_uscs(inputs) |
|
|
aashto, desc2, gi = classify_aashto(inputs) |
|
|
add_bot(f"Classification complete ✅ USCS={uscs} ({desc1}), AASHTO={aashto} (GI={gi}, {desc2})") |
|
|
|
|
|
full_report = query_llm_for_soil(uscs, aashto, desc1, desc2, gi) |
|
|
add_bot(full_report) |
|
|
site["classifier_state"] = -1 |
|
|
else: |
|
|
inputs["organic"] = False |
|
|
add_bot("What is % passing #200 sieve?") |
|
|
site["classifier_state"] = 2 |
|
|
elif state == 2: |
|
|
try: inputs["P200"] = float(user_in) |
|
|
except: inputs["P200"] = 0.0 |
|
|
add_bot("What is % passing #40 sieve?") |
|
|
site["classifier_state"] = 3 |
|
|
elif state == 3: |
|
|
try: inputs["P40"] = float(user_in) |
|
|
except: inputs["P40"] = 0.0 |
|
|
add_bot("Enter Liquid Limit (LL):") |
|
|
site["classifier_state"] = 4 |
|
|
elif state == 4: |
|
|
try: inputs["LL"] = float(user_in) |
|
|
except: inputs["LL"] = 0.0 |
|
|
add_bot("Enter Plastic Limit (PL):") |
|
|
site["classifier_state"] = 5 |
|
|
elif state == 5: |
|
|
try: inputs["PL"] = float(user_in) |
|
|
except: inputs["PL"] = 0.0 |
|
|
uscs, desc1 = classify_uscs(inputs) |
|
|
aashto, desc2, gi = classify_aashto(inputs) |
|
|
add_bot(f"Classification complete ✅ USCS={uscs} ({desc1}), AASHTO={aashto} (GI={gi}, {desc2})") |
|
|
full_report = query_llm_for_soil(uscs, aashto, desc1, desc2, gi) |
|
|
add_bot(full_report) |
|
|
site["classifier_state"] = -1 |
|
|
|
|
|
save_active_site(site) |
|
|
st.rerun() |
|
|
|
|
|
if site["classifier_state"] == -1: |
|
|
if st.button("📄 Export Classification Report"): |
|
|
site["classification_report"] = chat |
|
|
st.success("Report saved. Generate full report in Reports Page.") |
|
|
|
|
|
|
|
|
|
|
|
def query_llm_for_soil(uscs_code, aashto_code, desc1, desc2, gi): |
|
|
"""Ask Groq LLM to expand classification into detailed engineering report.""" |
|
|
prompt = f""" |
|
|
Soil Classification Results: |
|
|
- USCS: {uscs_code} ({desc1}) |
|
|
- AASHTO: {aashto_code} ({desc2}), Group Index={gi} |
|
|
|
|
|
Provide: |
|
|
1. Engineering characteristics (compressibility, permeability, shear strength, settlement, frost susceptibility). |
|
|
2. Construction applications (foundations, embankments, pavements). |
|
|
3. Typical stabilization or improvement methods. |
|
|
4. Warnings or limitations. |
|
|
|
|
|
Be detailed but concise, use professional engineering language. |
|
|
""" |
|
|
return groq_chat(prompt) |
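

# pandas/numpy are used by gsd_curve_ui below but were never imported in the
# original paste; importing here keeps this part self-contained.
import numpy as np
import pandas as pd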
|
|
|
|
|
|
|
|
|
|
|
def gsd_curve_ui(): |
|
|
st.header("📊 Grain Size Distribution (GSD) Curve") |
|
|
site = get_active_site() |
|
|
|
|
|
st.info("Upload sieve analysis data (CSV: Sieve size [mm], %Passing). Or manually enter D-values.") |
|
|
|
|
|
uploaded = st.file_uploader("Upload CSV", type=["csv"], key="gsd_csv") |
|
|
data = None |
|
|
if uploaded: |
|
|
df = pd.read_csv(uploaded) |
|
|
st.write(df) |
|
|
try: |
|
|
sizes = df.iloc[:,0].values |
|
|
passing = df.iloc[:,1].values |
|
|
data = (sizes, passing) |
|
|
except Exception as e: |
|
|
st.error(f"Error parsing CSV: {e}") |
|
|
|
|
|
if data is not None: |
|
|
sizes, passing = data |
|
|
|
|
|
fig, ax = plt.subplots() |
|
|
ax.semilogx(sizes, passing, marker="o", color="orange") |
|
|
ax.set_xlabel("Sieve Size (mm, log scale)") |
|
|
ax.set_ylabel("% Passing") |
|
|
ax.set_title("Grain Size Distribution Curve") |
|
|
ax.grid(True, which="both", linestyle="--", linewidth=0.5) |
|
|
st.pyplot(fig) |
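        # Persist the plot so the Reports page can embed it; the PDF builders
        # look for this exact path ("/tmp/geomate_gsd_plot.png").
        fig.savefig("/tmp/geomate_gsd_plot.png", dpi=150, bbox_inches="tight")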
|
|
|
|
|
|
|
|
        def interpD(target):
            # np.interp needs ascending x-values; sieve tables are usually listed
            # from coarse to fine (descending sizes, descending %passing), so both
            # arrays are reversed here.
            return np.interp(target, passing[::-1], sizes[::-1])
|
|
D10 = interpD(10) |
|
|
D30 = interpD(30) |
|
|
D60 = interpD(60) |
|
|
        Cu = D60/D10 if D10 > 0 else None
        Cc = (D30**2)/(D60*D10) if D10 > 0 and D60 > 0 else None

        st.write(f"D10={D10:.3f} mm, D30={D30:.3f} mm, D60={D60:.3f} mm")
        if Cu is not None and Cc is not None:
            st.write(f"Cu={Cu:.2f}, Cc={Cc:.2f}")
        else:
            st.write("Cu/Cc not computed (D10 or D60 interpolated to zero).")
|
|
|
|
|
site["GSD"] = {"D10":D10,"D30":D30,"D60":D60,"Cu":Cu,"Cc":Cc} |
|
|
save_active_site(site) |
|
|
|
|
|
with st.expander("Manual entry"): |
|
|
c1,c2,c3 = st.columns(3) |
|
|
D10 = c1.number_input("D10 (mm)", value=0.0) |
|
|
D30 = c2.number_input("D30 (mm)", value=0.0) |
|
|
D60 = c3.number_input("D60 (mm)", value=0.0) |
|
|
if st.button("Save D-values"): |
|
|
site["GSD"] = {"D10":D10,"D30":D30,"D60":D60} |
|
|
save_active_site(site) |
|
|
st.success("Saved to site.") |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import os |
|
|
import json |
|
|
import tempfile |
|
|
import traceback |
|
|
import base64 |
|
|
import time |
|
|
from math import isnan |
|
|
|
|
|
import streamlit as st |
|
|
from PIL import Image |
|
|
import io |
|
|
|
|
|
|
|
|
try: |
|
|
import geemap.foliumap as geemap |
|
|
except Exception: |
|
|
geemap = None |
|
|
|
|
|
try: |
|
|
import ee |
|
|
except Exception: |
|
|
ee = None |
|
|
|
|
|
|
|
|
try: |
|
|
import asyncio |
|
|
from pyppeteer import launch as pyppeteer_launch |
|
|
_HAS_PYPPETEER = True |
|
|
except Exception: |
|
|
_HAS_PYPPETEER = False |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# NOTE: st.cache_resource would return the wrapped function here, not the path,
# which then breaks os.makedirs below. Resolve the directory directly instead.
DATA_DIR = os.environ.get("GEOMATE_DATA_DIR", "./data")
|
|
os.makedirs(DATA_DIR, exist_ok=True) |
|
|
SITES_FILE = os.path.join(DATA_DIR, "sites.json") |
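
# Part 3 switches site persistence from st.session_state to a JSON file on disk.
# These helpers (and the get_active_site/save_active_site redefinitions below)
# intentionally override the session-state versions from Part 1.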
|
|
|
|
|
def load_sites(): |
|
|
if not os.path.exists(SITES_FILE): |
|
|
|
|
|
default = [{ |
|
|
"Site Name": "Site 1", |
|
|
"Coordinates": None, |
|
|
"lat": None, |
|
|
"lon": None, |
|
|
"Load Bearing Capacity": None, |
|
|
"Skin Shear Strength": None, |
|
|
"Relative Compaction": None, |
|
|
"Rate of Consolidation": None, |
|
|
"Nature of Construction": None, |
|
|
"Soil Profile": None, |
|
|
"Flood Data": None, |
|
|
"Seismic Data": None, |
|
|
"Topography": None, |
|
|
"GSD": None, |
|
|
"USCS": None, |
|
|
"AASHTO": None, |
|
|
"GI": None, |
|
|
"classifier_inputs": {}, |
|
|
"classifier_decision_path": "", |
|
|
"chat_history": [], |
|
|
"report_convo_state": 0, |
|
|
"map_snapshot": None |
|
|
}] |
|
|
with open(SITES_FILE, "w") as f: |
|
|
json.dump(default, f, indent=2) |
|
|
return default |
|
|
try: |
|
|
with open(SITES_FILE, "r") as f: |
|
|
return json.load(f) |
|
|
except Exception: |
|
|
return [] |
|
|
|
|
|
def save_sites(sites): |
|
|
with open(SITES_FILE, "w") as f: |
|
|
json.dump(sites, f, indent=2) |
|
|
|
|
|
def get_active_site_index(): |
|
|
|
|
|
idx = st.session_state.get("active_site_index", 0) |
|
|
sites = load_sites() |
|
|
if idx < 0 or idx >= len(sites): |
|
|
idx = 0 |
|
|
st.session_state["active_site_index"] = 0 |
|
|
return idx |
|
|
|
|
|
def get_active_site(): |
|
|
sites = load_sites() |
|
|
idx = get_active_site_index() |
|
|
return sites[idx] |
|
|
|
|
|
def save_active_site(site_obj): |
|
|
sites = load_sites() |
|
|
idx = get_active_site_index() |
|
|
sites[idx] = site_obj |
|
|
save_sites(sites) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
EE_READY = False |
|
|
EE_INIT_ERROR = None |
|
|
|
|
|
def init_earth_engine(): |
|
|
global EE_READY, EE_INIT_ERROR |
|
|
if ee is None: |
|
|
EE_INIT_ERROR = "earthengine-api not installed." |
|
|
EE_READY = False |
|
|
return EE_READY |
|
|
try: |
|
|
|
|
|
if "EARTH_ENGINE_KEY" not in st.secrets or "SERVICE_ACCOUNT" not in st.secrets: |
|
|
EE_INIT_ERROR = "Missing EARTH_ENGINE_KEY or SERVICE_ACCOUNT in Streamlit secrets." |
|
|
EE_READY = False |
|
|
return EE_READY |
|
|
|
|
|
raw_key = st.secrets["EARTH_ENGINE_KEY"] |
|
|
service_account = st.secrets["SERVICE_ACCOUNT"] |
|
|
|
|
|
|
|
|
if isinstance(raw_key, str): |
|
|
try: |
|
|
key_json = json.loads(raw_key) |
|
|
except Exception: |
|
|
|
|
|
try: |
|
|
with open(raw_key, "r") as f: |
|
|
key_json = json.load(f) |
|
|
except Exception as ex: |
|
|
EE_INIT_ERROR = f"Could not parse EARTH_ENGINE_KEY: {ex}" |
|
|
EE_READY = False |
|
|
return EE_READY |
|
|
elif isinstance(raw_key, dict): |
|
|
key_json = raw_key |
|
|
else: |
|
|
EE_INIT_ERROR = "EARTH_ENGINE_KEY must be JSON string or dict." |
|
|
EE_READY = False |
|
|
return EE_READY |
|
|
|
|
|
|
|
|
tmp = tempfile.NamedTemporaryFile(delete=False, suffix=".json") |
|
|
tmp.write(json.dumps(key_json).encode("utf-8")) |
|
|
tmp.flush() |
|
|
tmp.close() |
|
|
key_path = tmp.name |
|
|
|
|
|
|
|
|
creds = ee.ServiceAccountCredentials(service_account, key_path) |
|
|
ee.Initialize(creds) |
|
|
|
|
|
try: |
|
|
os.remove(key_path) |
|
|
except Exception: |
|
|
pass |
|
|
EE_READY = True |
|
|
EE_INIT_ERROR = None |
|
|
return True |
|
|
except Exception as e: |
|
|
EE_INIT_ERROR = str(e) |
|
|
EE_READY = False |
|
|
return False |
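

# Expected secrets layout (a sketch; adjust to your deployment):
#   # .streamlit/secrets.toml
#   GROQ_API_KEY = "gsk_..."
#   SERVICE_ACCOUNT = "my-sa@my-project.iam.gserviceaccount.com"
#   EARTH_ENGINE_KEY = '{"type": "service_account", ...}'   # full JSON key as one string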
|
|
|
|
|
|
|
|
if not EE_READY: |
|
|
init_earth_engine() |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def normalize_coords_for_ee(coords): |
|
|
""" |
|
|
Accepts coords in a few common formats (list of [lat,lon] or [lon,lat], nested) |
|
|
Returns a polygon coordinate acceptable to ee.Geometry.Polygon: [[ [lon,lat], ... ]] |
|
|
""" |
|
|
if not coords: |
|
|
raise ValueError("Empty coordinates") |
|
|
|
|
|
if isinstance(coords[0][0], (list, tuple)): |
|
|
ring = coords[0] |
|
|
else: |
|
|
ring = coords |
|
|
|
|
|
|
|
|
first = ring[0] |
|
|
try: |
|
|
a, b = float(first[0]), float(first[1]) |
|
|
except Exception: |
|
|
raise ValueError("Invalid coordinate format") |
|
|
|
|
|
|
|
|
if -180 <= a <= 180 and -90 <= b <= 90: |
|
|
lonlat = [[float(x), float(y)] for x,y in ring] |
|
|
elif -90 <= a <= 90 and -180 <= b <= 180: |
|
|
|
|
|
lonlat = [[float(y), float(x)] for x,y in ring] |
|
|
else: |
|
|
|
|
|
lonlat = [[float(y), float(x)] for x,y in ring] |
|
|
|
|
|
return [lonlat] |
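

# Example (a sketch): longitudes beyond +/-90 disambiguate the ordering.
#   normalize_coords_for_ee([[120.1, 30.2], [121.0, 30.2], [121.0, 31.0]])
#     -> [[[120.1, 30.2], [121.0, 30.2], [121.0, 31.0]]]   # kept as [lon, lat]
#   normalize_coords_for_ee([[30.2, 120.1], [30.2, 121.0], [31.0, 121.0]])
#     -> [[[120.1, 30.2], [121.0, 30.2], [121.0, 31.0]]]   # swapped from [lat, lon]
# Points with both values inside +/-90 remain ambiguous and are assumed [lon, lat].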
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def safe_reduce_region(image, geom, scale): |
|
|
try: |
|
|
stats = image.reduceRegion(reducer=ee.Reducer.mean(), geometry=geom, scale=scale, maxPixels=1e9) |
|
|
return stats.getInfo() |
|
|
except Exception as e: |
|
|
return {"error": str(e)} |
|
|
|
|
|
def fetch_flood_data(aoi_geom): |
|
|
""" |
|
|
Uses JRC Global Surface Water MonthlyHistory water band to compute mean water occurrence. |
|
|
Returns a dict with mean 'water' value (0..1) representing fraction of months with water. |
|
|
""" |
|
|
try: |
|
|
coll = ee.ImageCollection("JRC/GSW1_4/MonthlyHistory").select("water") |
|
|
img = coll.mean() |
|
|
info = safe_reduce_region(img, aoi_geom, scale=30) |
|
|
|
|
|
if info and "water" in info and isinstance(info["water"], (int, float)): |
|
|
water_mean = info["water"] |
|
|
|
|
|
try: |
|
|
water_pct = float(water_mean) * 100.0 |
|
|
except: |
|
|
water_pct = None |
|
|
return {"water_mean": water_mean, "water_percent": water_pct} |
|
|
return info |
|
|
except Exception as e: |
|
|
return {"error": str(e)} |
|
|
|
|
|
def fetch_seismic_data(aoi_geom): |
|
|
""" |
|
|
Fetch PGA (Peak ground acceleration) mean from a global PGA dataset. |
|
|
""" |
|
|
try: |
|
|
img = ee.Image("USGS/GME/hazards/seismic/2013_PGA_10pct_50yr") |
|
|
info = safe_reduce_region(img, aoi_geom, scale=1000) |
|
|
|
|
|
return info |
|
|
except Exception as e: |
|
|
return {"error": str(e)} |
|
|
|
|
|
def fetch_topography_data(aoi_geom): |
|
|
try: |
|
|
dem = ee.Image("USGS/SRTMGL1_003") |
|
|
info = safe_reduce_region(dem, aoi_geom, scale=90) |
|
|
return info |
|
|
except Exception as e: |
|
|
return {"error": str(e)} |
|
|
|
|
|
|
|
|
_SOIL_CODE_MAP = { |
|
|
0: "No data", |
|
|
1: "Sand (USDA texture class)", |
|
|
2: "Loamy sand", |
|
|
3: "Sandy loam", |
|
|
4: "Loam", |
|
|
5: "Silt loam", |
|
|
6: "Silt", |
|
|
7: "Silty clay loam", |
|
|
8: "Silty clay", |
|
|
9: "Clay loam", |
|
|
10: "Sandy clay loam", |
|
|
11: "Sandy clay", |
|
|
12: "Clay", |
|
|
|
|
|
} |
|
|
|
|
|
def fetch_soil_data(aoi_geom): |
|
|
try: |
|
|
img = ee.Image("OpenLandMap/SOL/SOL_TEXTURE-CLASS_USDA-TT_M/v02") |
|
|
        # reduceRegion returns a server-side ee.Dictionary; getInfo() brings it client-side.
        stats = img.reduceRegion(reducer=ee.Reducer.mode(), geometry=aoi_geom, scale=250, maxPixels=1e9).getInfo()

        if stats and "b1" in stats:
|
|
code = stats["b1"] |
|
|
human = _SOIL_CODE_MAP.get(int(code), f"Texture code {code}") |
|
|
return {"mode_code": code, "description": human} |
|
|
return stats |
|
|
except Exception as e: |
|
|
return {"error": str(e)} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def save_map_html_and_try_png(map_obj, site_name="site"): |
|
|
""" |
|
|
Save map HTML and attempt to render PNG using pyppeteer (headless chromium). |
|
|
Returns dict with keys: html (str), png_bytes (bytes | None), error (str | None) |
|
|
""" |
|
|
result = {"html": None, "png_bytes": None, "error": None} |
|
|
try: |
|
|
html = map_obj.to_html() |
|
|
result["html"] = html |
|
|
except Exception as e: |
|
|
result["error"] = f"Failed to generate HTML from map: {e}" |
|
|
return result |
|
|
|
|
|
|
|
|
if _HAS_PYPPETEER: |
|
|
try: |
|
|
tmp_html = tempfile.NamedTemporaryFile(delete=False, suffix=".html") |
|
|
tmp_html.write(html.encode("utf-8")) |
|
|
tmp_html.flush() |
|
|
tmp_html.close() |
|
|
tmp_path = tmp_html.name |
|
|
|
|
|
async def render(): |
|
|
browser = await pyppeteer_launch(args=['--no-sandbox'], headless=True) |
|
|
page = await browser.newPage() |
|
|
await page.setViewport({"width": 1200, "height": 800}) |
|
|
await page.goto("file://" + tmp_path) |
|
|
await asyncio.sleep(1.5) |
|
|
png = await page.screenshot({'fullPage': True}) |
|
|
await browser.close() |
|
|
return png |
|
|
|
|
|
            # Streamlit may already own a running event loop, so render on a fresh one
            # (asyncio.get_event_loop() is deprecated for this use and can collide).
            loop = asyncio.new_event_loop()
            try:
                png_bytes = loop.run_until_complete(render())
            finally:
                loop.close()
|
|
result["png_bytes"] = png_bytes |
|
|
|
|
|
try: |
|
|
os.remove(tmp_path) |
|
|
except: |
|
|
pass |
|
|
return result |
|
|
except Exception as e: |
|
|
result["error"] = f"pyppeteer PNG capture failed: {e}" |
|
|
|
|
|
return result |
|
|
else: |
|
|
result["error"] = "pyppeteer not available to render PNG. HTML saved." |
|
|
return result |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def humanize_flood_info(flood_dict): |
|
|
|
|
|
if not flood_dict: |
|
|
return "No flood data" |
|
|
if "error" in flood_dict: |
|
|
return f"Error fetching flood data: {flood_dict['error']}" |
|
|
wm = flood_dict.get("water_mean") |
|
|
wp = flood_dict.get("water_percent") |
|
|
if wp is None and wm is not None: |
|
|
try: |
|
|
wp = float(wm) * 100.0 |
|
|
except: |
|
|
wp = None |
|
|
if wp is None: |
|
|
return f"Flood data (raw): {flood_dict}" |
|
|
else: |
|
|
|
|
|
severity = "Low" |
|
|
if wp >= 50: |
|
|
severity = "Very high" |
|
|
elif wp >= 20: |
|
|
severity = "High" |
|
|
elif wp >= 5: |
|
|
severity = "Moderate" |
|
|
return f"Historic water occurrence mean: {wp:.2f}% → {severity} flood occurrence in AOI." |
|
|
|
|
|
def humanize_seismic_info(seis_dict): |
|
|
if not seis_dict: |
|
|
return "No seismic data" |
|
|
if "error" in seis_dict: |
|
|
return f"Error fetching seismic data: {seis_dict['error']}" |
|
|
|
|
|
|
|
|
|
|
|
for k,v in seis_dict.items(): |
|
|
try: |
|
|
val = float(v) |
|
|
|
|
|
g_val = val |
|
|
|
|
|
if g_val <= 0.02: |
|
|
level = "Low" |
|
|
elif g_val <= 0.05: |
|
|
level = "Moderate" |
|
|
elif g_val <= 0.15: |
|
|
level = "High" |
|
|
else: |
|
|
level = "Very high" |
|
|
return f"Seismic: {k} = {val:.4f} (approx. {level} PGA)." |
|
|
except Exception: |
|
|
continue |
|
|
return f"Seismic raw data: {seis_dict}" |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def locator_ui(): |
|
|
st.header("🌍 Locator (Earth Engine Powered)") |
|
|
sites = load_sites() |
|
|
idx = get_active_site_index() |
|
|
site = sites[idx] |
|
|
|
|
|
|
|
|
cols = st.columns([3,1,1]) |
|
|
with cols[0]: |
|
|
st.markdown("**Active site:**") |
|
|
site_name = st.text_input("Site name", value=site.get("Site Name", f"Site {idx+1}")) |
|
|
with cols[1]: |
|
|
if st.button("Save site name"): |
|
|
site["Site Name"] = site_name |
|
|
save_active_site(site) |
|
|
st.success("Site name updated.") |
|
|
with cols[2]: |
|
|
if st.button("New site"): |
|
|
|
|
|
new_site = { |
|
|
"Site Name": f"Site {len(sites)+1}", |
|
|
"Coordinates": None, |
|
|
"lat": None, |
|
|
"lon": None, |
|
|
"Load Bearing Capacity": None, |
|
|
"Skin Shear Strength": None, |
|
|
"Relative Compaction": None, |
|
|
"Rate of Consolidation": None, |
|
|
"Nature of Construction": None, |
|
|
"Soil Profile": None, |
|
|
"Flood Data": None, |
|
|
"Seismic Data": None, |
|
|
"Topography": None, |
|
|
"GSD": None, |
|
|
"USCS": None, |
|
|
"AASHTO": None, |
|
|
"GI": None, |
|
|
"classifier_inputs": {}, |
|
|
"classifier_decision_path": "", |
|
|
"chat_history": [], |
|
|
"report_convo_state": 0, |
|
|
"map_snapshot": None |
|
|
} |
|
|
sites.append(new_site) |
|
|
save_sites(sites) |
|
|
st.session_state["active_site_index"] = len(sites)-1 |
|
|
            st.rerun()  # st.experimental_rerun() is deprecated; use st.rerun() as elsewhere in this file
|
|
|
|
|
|
|
|
if not EE_READY: |
|
|
init_earth_engine() |
|
|
|
|
|
if geemap is None: |
|
|
st.error("geemap is not installed in the environment. Install via `pip install geemap` and earthengine-api.") |
|
|
return |
|
|
if ee is None: |
|
|
st.error("earthengine-api not installed. Install it and provide service account key in secrets.") |
|
|
return |
|
|
|
|
|
|
|
|
center = [20, 78] |
|
|
if site.get("Coordinates"): |
|
|
try: |
|
|
coords = site.get("Coordinates") |
|
|
|
|
|
if isinstance(coords[0][0], (list, tuple)): |
|
|
|
|
|
pt = coords[0][0] |
|
|
else: |
|
|
pt = coords[0] |
|
|
|
|
|
a,b = float(pt[0]), float(pt[1]) |
|
|
if -90 <= a <= 90 and -180 <= b <= 180: |
|
|
center = [a,b] |
|
|
else: |
|
|
center = [b,a] |
|
|
except Exception: |
|
|
pass |
|
|
|
|
|
m = geemap.Map(center=center, zoom=6, plugin_Draw=True, Draw_export=True, locate_control=True) |
|
|
try: |
|
|
m.add_basemap("HYBRID") |
|
|
except Exception: |
|
|
pass |
|
|
|
|
|
with st.expander("📌 Locator instructions"): |
|
|
st.markdown(""" |
|
|
- Use the draw tools to mark AOI (polygon/rectangle/circle). |
|
|
- Click the crosshair to auto-locate. |
|
|
- After drawing, click **Get AOI & Extract Data**. |
|
|
        - If Earth Engine is unavailable, extraction is skipped but the AOI is still saved.
|
|
""") |
|
|
|
|
|
|
|
|
try: |
|
|
m.to_streamlit(height=520) |
|
|
except Exception as e: |
|
|
st.error(f"Map rendering failed: {e}") |
|
|
return |
|
|
|
|
|
|
|
|
if st.button("📥 Get AOI & Extract Data"): |
|
|
try: |
|
|
|
|
|
coords = None |
|
|
try: |
|
|
coords = m.user_roi_bounds() |
|
|
except Exception: |
|
|
try: |
|
|
geojson = m.get_drawn_geojson() if hasattr(m, "get_drawn_geojson") else None |
|
|
if geojson and "features" in geojson and len(geojson["features"])>0: |
|
|
coords = geojson["features"][0]["geometry"]["coordinates"] |
|
|
except Exception: |
|
|
coords = None |
|
|
|
|
|
if not coords: |
|
|
st.warning("No AOI found. Draw a polygon/rectangle and try again.") |
|
|
else: |
|
|
|
|
|
try: |
|
|
lonlat_poly = normalize_coords_for_ee(coords) |
|
|
aoi = ee.Geometry.Polygon(lonlat_poly) |
|
|
except Exception as e: |
|
|
st.error(f"Coordinate normalization failed: {e}") |
|
|
st.stop() |
|
|
|
|
|
|
|
|
site["Coordinates"] = coords |
|
|
save_active_site(site) |
|
|
|
|
|
|
|
|
if EE_READY: |
|
|
with st.spinner("Querying Earth Engine (flood, seismic, topo, soil)..."): |
|
|
flood = fetch_flood_data(aoi) |
|
|
seismic = fetch_seismic_data(aoi) |
|
|
topo = fetch_topography_data(aoi) |
|
|
soil = fetch_soil_data(aoi) |
|
|
|
|
|
|
|
|
site["Flood Data"] = flood |
|
|
site["Seismic Data"] = seismic |
|
|
site["Topography"] = topo |
|
|
site["Soil Profile"] = soil |
|
|
save_active_site(site) |
|
|
|
|
|
st.success("✅ Data extracted and saved to site.") |
|
|
st.markdown("### Extracted summary") |
|
|
st.write(humanize_flood_info(flood)) |
|
|
st.write(humanize_seismic_info(seismic)) |
|
|
st.write(f"Topography (SRTM mean): {topo}") |
|
|
st.write(f"Soil profile (mode): {soil}") |
|
|
|
|
|
|
|
|
snap = save_map_html_and_try_png(m, site.get("Site Name","site")) |
|
|
if snap.get("html"): |
|
|
site["map_snapshot_html"] = snap["html"] |
|
|
if snap.get("png_bytes"): |
|
|
site["map_snapshot_png"] = base64.b64encode(snap["png_bytes"]).decode("utf-8") |
|
|
save_active_site(site) |
|
|
if snap.get("png_bytes"): |
|
|
st.image(Image.open(io.BytesIO(snap["png_bytes"])), caption="Map snapshot (PNG)", use_column_width=True) |
|
|
else: |
|
|
st.info("Map PNG snapshot not available; HTML snapshot saved in site data.") |
|
|
else: |
|
|
st.info("Earth Engine unavailable — AOI saved locally.") |
|
|
st.success("AOI saved to site.") |
|
|
except Exception as e: |
|
|
st.error(f"Extraction failed: {e}\n{traceback.format_exc()}") |
|
|
|
|
|
|
|
|
if site.get("Flood Data") or site.get("Seismic Data") or site.get("Topography") or site.get("Soil Profile"): |
|
|
st.markdown("### Cached site dataset (last extraction)") |
|
|
st.json({ |
|
|
"Flood": site.get("Flood Data"), |
|
|
"Seismic": site.get("Seismic Data"), |
|
|
"Topography": site.get("Topography"), |
|
|
"Soil": site.get("Soil Profile") |
|
|
}) |
|
|
|
|
|
|
|
|
site["Site Name"] = site_name |
|
|
save_active_site(site) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import pytesseract |
|
|
from PIL import Image |
|
|
import fitz |
|
|
|
|
|
def extract_text_from_file(uploaded_file): |
|
|
"""OCR utility for images and PDFs""" |
|
|
try: |
|
|
if uploaded_file.type in ["image/png", "image/jpeg", "image/jpg"]: |
|
|
img = Image.open(uploaded_file) |
|
|
text = pytesseract.image_to_string(img) |
|
|
return text.strip() |
|
|
elif uploaded_file.type == "application/pdf": |
|
|
text_pages = [] |
|
|
pdf = fitz.open(stream=uploaded_file.read(), filetype="pdf") |
|
|
for page in pdf: |
|
|
text_pages.append(page.get_text("text")) |
|
|
|
|
|
if not text_pages[-1].strip(): |
|
|
pix = page.get_pixmap() |
|
|
img = Image.frombytes("RGB", [pix.width, pix.height], pix.samples) |
|
|
text_pages[-1] = pytesseract.image_to_string(img) |
|
|
return "\n".join(text_pages).strip() |
|
|
else: |
|
|
return "" |
|
|
except Exception as e: |
|
|
st.error(f"OCR failed: {e}") |
|
|
return "" |
|
|
|
|
|
def rag_ui(): |
|
|
st.header("🤖 GeoMate Ask — RAG + Groq (per-site memory + OCR)") |
|
|
site = get_active_site() |
|
|
|
|
|
if "chat_history" not in site: |
|
|
site["chat_history"] = [] |
|
|
|
|
|
st.markdown( |
|
|
"**Context:** The RAG uses your FAISS knowledge base (upload .zip in this page), " |
|
|
"Groq LLM for answers, and optional OCR from uploaded images/PDFs. " |
|
|
"Chat history is saved per site." |
|
|
) |
|
|
|
|
|
|
|
|
with st.expander("📂 Upload FAISS DB (zip with index.faiss + meta.pkl)"): |
|
|
uploaded = st.file_uploader("Upload faiss_books_db.zip", type=["zip"], key="faiss_db_uploader") |
|
|
if uploaded: |
|
|
tmpf = tempfile.NamedTemporaryFile(delete=False, suffix=".zip") |
|
|
tmpf.write(uploaded.getvalue()) |
|
|
tmpf.flush() |
|
|
ix, meta = load_faiss_db_from_zip(tmpf.name) |
|
|
if ix is not None: |
|
|
ss["faiss_index"] = ix |
|
|
ss["faiss_meta"] = meta |
|
|
st.success("✅ FAISS DB loaded.") |
|
|
|
|
|
|
|
|
for turn in site.get("chat_history", []): |
|
|
role, text = turn.get("role"), turn.get("text") |
|
|
if role == "bot": |
|
|
st.markdown( |
|
|
f"<div style='background:{THEME['bubble_bg']};padding:8px;border-radius:12px;border:2px solid {THEME['accent']};'>" |
|
|
f"<b>🤖 GeoMate:</b> {text}</div>", unsafe_allow_html=True |
|
|
) |
|
|
else: |
|
|
st.markdown( |
|
|
f"<div style='background:#1a2436;color:#fff;padding:8px;border-radius:12px;margin-left:40px;'>" |
|
|
f"<b>👤 You:</b> {text}</div>", unsafe_allow_html=True |
|
|
) |
|
|
|
|
|
|
|
|
user_q = st.text_input("Ask GeoMate:", key="geomate_rag_input") |
|
|
uploaded_ocr = st.file_uploader("Optional OCR input (image/pdf)", type=["png","jpg","jpeg","pdf"], key="rag_ocr_uploader") |
|
|
|
|
|
if st.button("Ask", key="geomate_rag_button"): |
|
|
if not user_q.strip() and not uploaded_ocr: |
|
|
st.warning("Please type a question or upload a file.") |
|
|
else: |
|
|
query_text = user_q.strip() |
|
|
if uploaded_ocr: |
|
|
with st.spinner("Running OCR..."): |
|
|
ocr_text = extract_text_from_file(uploaded_ocr) |
|
|
if ocr_text: |
|
|
query_text += "\n\n[OCR Extracted Content]\n" + ocr_text |
|
|
|
|
|
|
|
|
site["chat_history"].append({"role":"user","text":query_text, "time":datetime.utcnow().isoformat()}) |
|
|
save_active_site(site) |
|
|
|
|
|
|
|
|
with st.spinner("Retrieving context and calling LLM..."): |
|
|
answer = rag_retrieve_and_answer(query_text, topk=5) |
|
|
|
|
|
|
|
|
site["chat_history"].append({"role":"bot","text":answer, "time":datetime.utcnow().isoformat()}) |
|
|
|
|
|
|
|
|
site = update_site_description_from_text(site, query_text + "\n" + answer) |
|
|
save_active_site(site) |
|
|
|
|
|
st.rerun() |
|
|
|
|
|
|
|
|
colA, colB = st.columns(2) |
|
|
if colA.button("💾 Save Chat"): |
|
|
save_active_site(site) |
|
|
st.success("Chat saved into site JSON.") |
|
|
if colB.button("🗑️ Clear Chat"): |
|
|
site["chat_history"] = [] |
|
|
save_active_site(site) |
|
|
st.success("Cleared history for this site.") |
|
|
|
|
|
|
|
|
|
|
|
import io, os, json |
|
|
from datetime import datetime |
|
|
from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, Table, TableStyle, PageBreak, Image as RLImage |
|
|
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle |
|
|
from reportlab.lib.pagesizes import A4 |
|
|
from reportlab.lib.units import mm |
|
|
from reportlab.lib import colors |
|
|
import streamlit as st |
|
|
|
|
|
|
|
|
THEME = {"accent": colors.HexColor("#FF6600")} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def build_classification_pdf_bytes(site: dict): |
|
|
""" |
|
|
Build classification-only PDF (returns bytes) |
|
|
""" |
|
|
buf = io.BytesIO() |
|
|
doc = SimpleDocTemplate( |
|
|
buf, pagesize=A4, |
|
|
leftMargin=20*mm, rightMargin=20*mm, |
|
|
topMargin=20*mm, bottomMargin=20*mm |
|
|
) |
|
|
styles = getSampleStyleSheet() |
|
|
    title = ParagraphStyle("title", parent=styles["Title"], fontSize=20, textColor=PDF_THEME["accent"], alignment=1)
|
|
    h1 = ParagraphStyle("h1", parent=styles["Heading1"], fontSize=14, textColor=PDF_THEME["accent"])
|
|
body = ParagraphStyle("body", parent=styles["BodyText"], fontSize=10) |
|
|
|
|
|
elems = [] |
|
|
elems.append(Paragraph("Classification Report — GeoMate V2", title)) |
|
|
elems.append(Spacer(1, 8)) |
|
|
elems.append(Paragraph(f"Site: {site.get('Site Name','-')}", h1)) |
|
|
elems.append(Paragraph(f"Coordinates: {site.get('Coordinates','-')}", body)) |
|
|
elems.append(Spacer(1, 6)) |
|
|
|
|
|
|
|
|
inputs = site.get("classifier_inputs", {}) |
|
|
if inputs: |
|
|
data = [["Parameter", "Value"]] |
|
|
for k, v in inputs.items(): |
|
|
data.append([k, str(v)]) |
|
|
t = Table(data, colWidths=[80*mm, 80*mm]) |
|
|
t.setStyle(TableStyle([ |
|
|
("GRID", (0,0), (-1,-1), 0.5, colors.grey), |
|
|
("BACKGROUND", (0,0), (-1,0), THEME["accent"]), |
|
|
("TEXTCOLOR", (0,0), (-1,0), colors.white) |
|
|
])) |
|
|
elems.append(t) |
|
|
elems.append(Spacer(1, 8)) |
|
|
|
|
|
|
|
|
elems.append(Paragraph("Results", h1)) |
|
|
elems.append(Paragraph(f"USCS: {site.get('USCS','N/A')}", body)) |
|
|
elems.append(Paragraph(f"AASHTO: {site.get('AASHTO','N/A')} (GI: {site.get('GI','N/A')})", body)) |
|
|
|
|
|
|
|
|
if site.get("ocr_text"): |
|
|
elems.append(Spacer(1, 8)) |
|
|
elems.append(Paragraph("OCR Extracted Notes", h1)) |
|
|
elems.append(Paragraph(site.get("ocr_text","No OCR data found."), body)) |
|
|
|
|
|
|
|
|
gsd = site.get("GSD") |
|
|
if gsd: |
|
|
elems.append(Spacer(1, 8)) |
|
|
elems.append(Paragraph("Grain Size Distribution (GSD)", h1)) |
|
|
elems.append(Paragraph(f"D10: {gsd.get('D10')}, D30: {gsd.get('D30')}, D60: {gsd.get('D60')}", body)) |
|
|
gsd_img_path = "/tmp/geomate_gsd_plot.png" |
|
|
if os.path.exists(gsd_img_path): |
|
|
elems.append(Spacer(1, 6)) |
|
|
elems.append(RLImage(gsd_img_path, width=150*mm, height=80*mm)) |
|
|
|
|
|
elems.append(Spacer(1, 10)) |
|
|
elems.append(Paragraph("Decision path", h1)) |
|
|
elems.append(Paragraph(site.get("classifier_decision_path","Not recorded"), body)) |
|
|
|
|
|
doc.build(elems) |
|
|
pdf = buf.getvalue() |
|
|
buf.close() |
|
|
return pdf |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def build_full_geotech_pdf_bytes(sites_list: list, external_refs: list): |
|
|
""" |
|
|
Build a full geotechnical report covering all selected sites. |
|
|
sites_list: list of site dictionaries |
|
|
Returns bytes of PDF. |
|
|
""" |
|
|
buf = io.BytesIO() |
|
|
doc = SimpleDocTemplate( |
|
|
buf, pagesize=A4, |
|
|
leftMargin=20*mm, rightMargin=20*mm, |
|
|
topMargin=20*mm, bottomMargin=20*mm |
|
|
) |
|
|
styles = getSampleStyleSheet() |
|
|
    title = ParagraphStyle("title", parent=styles["Title"], fontSize=20, textColor=PDF_THEME["accent"], alignment=1)
|
|
    h1 = ParagraphStyle("h1", parent=styles["Heading1"], fontSize=14, textColor=PDF_THEME["accent"])
|
|
body = ParagraphStyle("body", parent=styles["BodyText"], fontSize=10) |
|
|
|
|
|
elems = [] |
|
|
elems.append(Paragraph("Full Geotechnical Investigation Report — GeoMate V2", title)) |
|
|
elems.append(Spacer(1, 6)) |
|
|
elems.append(Paragraph(f"Date: {datetime.today().strftime('%Y-%m-%d')}", body)) |
|
|
elems.append(Spacer(1, 10)) |
|
|
|
|
|
|
|
|
for s in sites_list: |
|
|
elems.append(Paragraph(f"Site: {s.get('Site Name','Unnamed')}", h1)) |
|
|
elems.append(Paragraph(f"Coordinates: {s.get('Coordinates','Not provided')}", body)) |
|
|
elems.append(Spacer(1, 6)) |
|
|
|
|
|
|
|
|
if s.get("ocr_text"): |
|
|
elems.append(Paragraph("OCR Extracted Notes", h1)) |
|
|
elems.append(Paragraph(s.get("ocr_text"), body)) |
|
|
elems.append(Spacer(1, 6)) |
|
|
|
|
|
|
|
|
elems.append(Paragraph("Classification", h1)) |
|
|
elems.append(Paragraph(f"USCS: {s.get('USCS','N/A')}", body)) |
|
|
elems.append(Paragraph(f"AASHTO: {s.get('AASHTO','N/A')} (GI: {s.get('GI','N/A')})", body)) |
|
|
|
|
|
|
|
|
if s.get("map_snapshot") and os.path.exists(s["map_snapshot"]): |
|
|
elems.append(Spacer(1, 6)) |
|
|
elems.append(Paragraph("Site Map Snapshot", h1)) |
|
|
elems.append(RLImage(s["map_snapshot"], width=140*mm, height=80*mm)) |
|
|
|
|
|
|
|
|
gsd = s.get("GSD") |
|
|
if gsd: |
|
|
elems.append(Spacer(1, 6)) |
|
|
elems.append(Paragraph("Grain Size Distribution", h1)) |
|
|
elems.append(Paragraph( |
|
|
f"D10: {gsd.get('D10')}, D30: {gsd.get('D30')}, " |
|
|
f"D60: {gsd.get('D60')}, Cu: {gsd.get('Cu')}, Cc: {gsd.get('Cc')}", |
|
|
body |
|
|
)) |
|
|
gsd_img = "/tmp/geomate_gsd_plot.png" |
|
|
if os.path.exists(gsd_img): |
|
|
elems.append(Spacer(1, 6)) |
|
|
elems.append(RLImage(gsd_img, width=150*mm, height=80*mm)) |
|
|
|
|
|
|
|
|
elems.append(Spacer(1, 8)) |
|
|
elems.append(Paragraph("Recommendations", h1)) |
|
|
if s.get("USCS") and s["USCS"].startswith("C"): |
|
|
elems.append(Paragraph(" - Clayey soils: check consolidation/settlement. Consider raft or pile foundations.", body)) |
|
|
else: |
|
|
elems.append(Paragraph(" - Granular soils: shallow foundations possible with compaction and drainage.", body)) |
|
|
|
|
|
elems.append(PageBreak()) |
|
|
|
|
|
|
|
|
refs = [] |
|
|
for s in sites_list: |
|
|
if s.get("rag_sources"): |
|
|
refs.extend(s["rag_sources"]) |
|
|
refs = list(set(refs)) |
|
|
refs.extend(external_refs) |
|
|
|
|
|
if refs: |
|
|
elems.append(Paragraph("References", h1)) |
|
|
for r in refs: |
|
|
elems.append(Paragraph(r, body)) |
|
|
|
|
|
doc.build(elems) |
|
|
pdf = buf.getvalue() |
|
|
buf.close() |
|
|
return pdf |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def reports_ui(): |
|
|
st.header("📑 Reports — Classification-only & Full Geotechnical Report") |
|
|
|
|
|
|
|
|
st.subheader("Classification-only report") |
|
|
sites = ss.get("sites", []) |
|
|
if not sites: |
|
|
st.warning("No sites available.") |
|
|
return |
|
|
|
|
|
site_names = [s.get("Site Name","Unnamed") for s in sites] |
|
|
sel_cls = st.selectbox("Select site", site_names, index=ss.get("active_site_idx",0)) |
|
|
if st.button("Generate Classification PDF"): |
|
|
site = ss["sites"][site_names.index(sel_cls)] |
|
|
pdf_bytes = build_classification_pdf_bytes(site) |
|
|
st.download_button( |
|
|
"Download Classification PDF", |
|
|
data=pdf_bytes, |
|
|
file_name=f"classification_{sel_cls}.pdf", |
|
|
mime="application/pdf" |
|
|
) |
|
|
|
|
|
st.markdown("---") |
|
|
|
|
|
|
|
|
st.subheader("Full Geotechnical Report") |
|
|
selected = st.multiselect("Sites to include", site_names, default=site_names) |
|
|
ext_refs_text = st.text_area("External references (one per line)") |
|
|
if st.button("Generate Full Report PDF"): |
|
|
if not selected: |
|
|
st.error("Select at least one site.") |
|
|
else: |
|
|
chosen_sites = [ss["sites"][site_names.index(n)] for n in selected] |
|
|
ext_refs = [l.strip() for l in ext_refs_text.splitlines() if l.strip()] |
|
|
with st.spinner("Building PDF (this may take a few seconds)..."): |
|
|
pdf_bytes = build_full_geotech_pdf_bytes(chosen_sites, ext_refs) |
|
|
st.download_button( |
|
|
"Download Full Geotechnical Report", |
|
|
data=pdf_bytes, |
|
|
file_name="geomate_full_report.pdf", |
|
|
mime="application/pdf" |
|
|
) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def ui_main_final(): |
|
|
|
|
|
with st.sidebar: |
|
|
st.markdown(f"<h3 style='color:{THEME['accent']};margin:6px 0;'>GeoMate V2</h3>", unsafe_allow_html=True) |
|
|
model = st.selectbox("Select LLM model", ["meta-llama/llama-4-maverick-17b-128e-instruct", "llama3-8b-8192", "gemma-7b-it"], index=0) |
|
|
ss["selected_model"] = model |
|
|
|
|
|
st.markdown("### Project Sites") |
|
|
|
|
|
cols = st.columns([3,1]) |
|
|
new_site_name = cols[0].text_input("New site name", key="sidebar_new_site_name") |
|
|
if cols[1].button("➕ Add"): |
|
|
if new_site_name.strip(): |
|
|
add_site(new_site_name.strip()) |
|
|
st.success(f"Added site {new_site_name.strip()}") |
|
|
else: |
|
|
add_site(f"Site-{len(ss['sites'])+1}") |
|
|
st.success("Added new site") |
|
|
|
|
|
st.markdown("Active site:") |
|
|
        if ss["sites"]:
            idx = st.radio(
                "Select active site",
                options=list(range(len(ss["sites"]))),
                format_func=lambda i: ss["sites"][i].get("Site Name", "Site"),
                index=min(ss.get("active_site_idx", 0), len(ss["sites"]) - 1),
            )
            ss["active_site_idx"] = idx
        else:
            st.info("No sites yet. Add one above.")
|
|
|
|
|
        if ss["sites"]:
            with st.expander("Show active site JSON"):
                st.json(ss["sites"][ss["active_site_idx"]])
|
|
|
|
|
|
|
|
page = ss.get("page","Landing") |
|
|
if page == "Landing": |
|
|
landing_ui() |
|
|
elif page == "Soil Recognizer": |
|
|
soil_recognizer_ui() |
|
|
elif page == "Soil Classifier": |
|
|
|
|
|
soil_classifier_ui() |
|
|
elif page == "GSD Curve": |
|
|
gsd_curve_ui() |
|
|
elif page == "Locator": |
|
|
locator_ui() |
|
|
elif page == "GeoMate Ask": |
|
|
rag_ui() |
|
|
elif page == "Reports": |
|
|
reports_ui() |
|
|
else: |
|
|
st.info("Select a page from the sidebar.") |
|
|
|
|
|
|
|
|
if __name__ == "__main__": |
|
|
ui_main_final() |
|
|
|
|
|
|
|
|
|