Update app.py
Browse files
app.py
CHANGED
|
@@ -1,100 +1,164 @@
|
|
| 1 |
-
#
|
| 2 |
-
#
|
| 3 |
-
#
|
| 4 |
-
#
|
| 5 |
-
#
|
| 6 |
-
#
|
| 7 |
-
|
| 8 |
-
#
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 9 |
import streamlit as st
|
| 10 |
st.set_page_config(page_title="GeoMate V2", page_icon="🌍", layout="wide", initial_sidebar_state="expanded")
|
| 11 |
|
| 12 |
-
#
|
| 13 |
-
import os
|
| 14 |
-
|
| 15 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 16 |
|
| 17 |
-
#
|
| 18 |
# Optional dependency: FAISS vector index (used for RAG retrieval).
# Falls back to None when the package is unavailable; downstream code is
# expected to check `faiss is None` before using it.
try:
    import faiss
except Exception:
    faiss = None
|
| 22 |
|
| 23 |
try:
|
| 24 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 25 |
except Exception:
|
| 26 |
-
|
| 27 |
|
| 28 |
# Optional dependency: Google Earth Engine client + geemap (map/extraction UI).
# Both names fall back to None when import fails (package missing or
# environment not configured), so the rest of the app can still run.
try:
    import ee
    import geemap
except Exception:
    ee = None
    geemap = None
|
|
|
|
| 34 |
|
| 35 |
try:
|
| 36 |
-
|
| 37 |
-
from reportlab.lib.pagesizes import A4
|
| 38 |
-
from reportlab.lib.units import mm
|
| 39 |
-
from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, Table, TableStyle, PageBreak
|
| 40 |
-
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
|
| 41 |
except Exception:
|
| 42 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 43 |
try:
|
| 44 |
-
|
| 45 |
except Exception:
|
| 46 |
-
|
| 47 |
|
| 48 |
-
#
|
| 49 |
-
|
| 50 |
-
"""
|
| 51 |
-
<style>
|
| 52 |
-
.bubble-bot {
|
| 53 |
-
background: linear-gradient(180deg,#111111,#1a1a1a);
|
| 54 |
-
color: #fff;
|
| 55 |
-
padding:10px 14px;
|
| 56 |
-
border-radius:12px;
|
| 57 |
-
border-left:4px solid #FF8C00;
|
| 58 |
-
margin:6px 0;
|
| 59 |
-
}
|
| 60 |
-
.bubble-user {
|
| 61 |
-
background: linear-gradient(180deg,#0f2b3a,#05202a);
|
| 62 |
-
color: #e6f7ff;
|
| 63 |
-
padding:10px 14px;
|
| 64 |
-
border-radius:12px;
|
| 65 |
-
margin:6px 0;
|
| 66 |
-
text-align:right;
|
| 67 |
-
}
|
| 68 |
-
.sidebar .stButton>button { border-radius:8px; }
|
| 69 |
-
.active-bubble { box-shadow: 0 0 0 3px rgba(255,122,0,0.12); }
|
| 70 |
-
</style>
|
| 71 |
-
""",
|
| 72 |
-
unsafe_allow_html=True,
|
| 73 |
-
)
|
| 74 |
|
| 75 |
-
|
| 76 |
-
|
| 77 |
-
|
| 78 |
-
|
| 79 |
-
REQUIRED_SECRETS.append("SERVICE_ACCOUNT") # or EARTH_ENGINE_KEY
|
| 80 |
|
| 81 |
-
|
| 82 |
if missing:
|
| 83 |
-
st.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 84 |
st.stop()
|
| 85 |
|
| 86 |
-
#
|
| 87 |
-
|
| 88 |
-
|
|
|
|
| 89 |
|
| 90 |
-
#
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 91 |
ss = st.session_state
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 92 |
if "sites" not in ss:
|
| 93 |
-
#
|
| 94 |
-
ss
|
| 95 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 96 |
"Site Coordinates": "",
|
| 97 |
-
"lat": None,
|
|
|
|
| 98 |
"Load Bearing Capacity": None,
|
| 99 |
"Skin Shear Strength": None,
|
| 100 |
"Relative Compaction": None,
|
|
@@ -110,921 +174,1168 @@ if "sites" not in ss:
|
|
| 110 |
"GI": None,
|
| 111 |
"classifier_inputs": {},
|
| 112 |
"classifier_decision_path": "",
|
| 113 |
-
"chat_history": [],
|
| 114 |
"report_convo_state": 0,
|
| 115 |
"map_snapshot": None,
|
| 116 |
-
"
|
| 117 |
-
} ]
|
| 118 |
-
# Seed the remaining session-state defaults without clobbering values that
# survive a rerun (st.session_state is a MutableMapping, so setdefault works).
ss.setdefault("active_site_index", 0)
ss.setdefault("llm_model", "meta-llama/llama-4-maverick-17b-128e-instruct")
ss.setdefault("faiss_loaded", False)
ss.setdefault("ee_inited", False)
|
| 126 |
-
|
| 127 |
-
# 7. Utility helpers
def now_str() -> str:
    """Return a UTC timestamp string (e.g. '2024-01-31_120000') for filenames."""
    # datetime.utcnow() is deprecated (Python 3.12+) and returns a naive
    # datetime; use an aware UTC datetime — the formatted string is identical.
    from datetime import timezone  # local import: top-of-file imports may not expose timezone
    return datetime.now(timezone.utc).strftime("%Y-%m-%d_%H%M%S")

def get_active_site() -> Dict[str, Any]:
    """Return the dict describing the currently selected site."""
    return ss["sites"][ss["active_site_index"]]

def save_site_field(site_idx: int, key: str, value) -> None:
    """Store *value* under *key* for the site at index *site_idx*."""
    ss["sites"][site_idx][key] = value

def map_pretty_sites() -> List[str]:
    """Return display names for all sites ('Site' when a site is unnamed)."""
    return [s.get("Site Name", "Site") for s in ss["sites"]]
|
| 139 |
-
|
| 140 |
-
# 8. Engineering characteristics table (full detailed set -> expanded)
# Qualitative engineering behaviour per broad soil type. Values are display
# strings surfaced in classification summaries and PDF reports; keys are the
# soil-type labels the classifier maps USCS groups onto. Not every inner dict
# has the same keys (e.g. "Cement grouting" only where relevant) — consumers
# should iterate items() rather than index fixed keys.
ENGINEERING_CHARACTERISTICS = {
    "Gravel": {
        "Settlement": "Negligible",
        "Quicksand": "No",
        "Frost-heaving": "None",
        "Drainage": "Excellent",
        "Cement grouting": "Possible",
        "Notes": "Good bearing; suitable for foundations with minimal treatment."
    },
    "Coarse sand": {
        "Settlement": "Negligible",
        "Quicksand": "No",
        "Frost-heaving": "None",
        "Drainage": "Excellent",
        "Cement grouting": "Possible if coarse",
        "Notes": "Good compaction properties; typical for pavement subgrade if dense."
    },
    "Medium sand": {
        "Settlement": "Low",
        "Quicksand": "Unlikely",
        "Frost-heaving": "None",
        "Drainage": "Good",
        "Notes": "Moderate bearing; check gradation."
    },
    "Fine sand": {
        "Settlement": "Potentially small",
        "Quicksand": "Possible in presence of groundwater",
        "Frost-heaving": "Depends on fines",
        "Drainage": "Fair",
        "Notes": "Uniform fine sands may be susceptible to piping under flow."
    },
    "Silt": {
        "Settlement": "Moderate to high",
        "Quicksand": "Possible (liquefaction risk under seismic)",
        "Frost-heaving": "Likely",
        "Drainage": "Poor",
        "Notes": "Silty soils often need stabilization for foundations."
    },
    "Clay": {
        "Settlement": "High (consolidation possible)",
        "Quicksand": "No",
        "Frost-heaving": "Less than silt",
        "Drainage": "Poor",
        "Notes": "Clayey soils require careful foundation design; may be expansive."
    }
}
|
| 187 |
|
| 188 |
-
|
| 189 |
-
|
| 190 |
-
|
| 191 |
-
|
| 192 |
-
|
| 193 |
-
|
| 194 |
-
|
| 195 |
-
|
| 196 |
-
|
| 197 |
-
|
| 198 |
-
|
| 199 |
-
|
| 200 |
-
|
| 201 |
-
|
| 202 |
-
|
| 203 |
-
|
| 204 |
-
|
| 205 |
-
char_summary = {"Notes":"Highly organic peat — large settlement potential; unsuitable without improvement."}
|
| 206 |
-
res = f"According to USCS, the soil is {uscs} — {uscs_expl}\nAccording to AASHTO, it is {aashto}."
|
| 207 |
-
return res, uscs, aashto, GI, char_summary
|
| 208 |
-
|
| 209 |
-
# numeric inputs
|
| 210 |
-
try:
|
| 211 |
-
P2 = float(inputs.get("P2",0.0))
|
| 212 |
-
except:
|
| 213 |
-
P2 = 0.0
|
| 214 |
-
try:
|
| 215 |
-
P4 = float(inputs.get("P4",0.0))
|
| 216 |
-
except:
|
| 217 |
-
P4 = 0.0
|
| 218 |
-
try:
|
| 219 |
-
D60 = float(inputs.get("D60",0.0))
|
| 220 |
-
D30 = float(inputs.get("D30",0.0))
|
| 221 |
-
D10 = float(inputs.get("D10",0.0))
|
| 222 |
-
except:
|
| 223 |
-
D60=D30=D10=0.0
|
| 224 |
-
try:
|
| 225 |
-
LL = float(inputs.get("LL",0.0))
|
| 226 |
-
PL = float(inputs.get("PL",0.0))
|
| 227 |
-
except:
|
| 228 |
-
LL=0.0; PL=0.0
|
| 229 |
-
PI = LL - PL
|
| 230 |
-
|
| 231 |
-
Cu = (D60 / D10) if (D10>0 and D60>0) else 0.0
|
| 232 |
-
Cc = ((D30**2) / (D10*D60)) if (D10>0 and D30>0 and D60>0) else 0.0
|
| 233 |
-
|
| 234 |
-
uscs = "Unknown"
|
| 235 |
-
uscs_expl = ""
|
| 236 |
-
# USCS logic (verbatim from your script)
|
| 237 |
-
if P2 <= 50:
|
| 238 |
-
# Coarse-Grained Soils
|
| 239 |
-
if P4 <= 50:
|
| 240 |
-
# Gravels
|
| 241 |
-
if Cu!=0 and Cc!=0:
|
| 242 |
-
if Cu >= 4 and 1 <= Cc <= 3:
|
| 243 |
-
uscs = "GW"; uscs_expl = "Well-graded gravel (good engineering properties, high strength, good drainage)."
|
| 244 |
-
else:
|
| 245 |
-
uscs = "GP"; uscs_expl = "Poorly-graded gravel (less favorable gradation)."
|
| 246 |
-
else:
|
| 247 |
-
if PI < 4 or PI < 0.73 * (LL - 20):
|
| 248 |
-
uscs = "GM"; uscs_expl = "Silty gravel (fines may reduce permeability and strength)."
|
| 249 |
-
elif PI > 7 and PI > 0.73 * (LL - 20):
|
| 250 |
-
uscs = "GC"; uscs_expl = "Clayey gravel (clayey fines increase plasticity, reduce strength)."
|
| 251 |
-
else:
|
| 252 |
-
uscs = "GM-GC"; uscs_expl = "Gravel with mixed silt/clay fines."
|
| 253 |
-
else:
|
| 254 |
-
# Sands
|
| 255 |
-
if Cu!=0 and Cc!=0:
|
| 256 |
-
if Cu >= 6 and 1 <= Cc <= 3:
|
| 257 |
-
uscs = "SW"; uscs_expl = "Well-graded sand (good compaction and drainage)."
|
| 258 |
-
else:
|
| 259 |
-
uscs = "SP"; uscs_expl = "Poorly-graded sand (uniform or gap-graded)."
|
| 260 |
-
else:
|
| 261 |
-
if PI < 4 or PI <= 0.73 * (LL - 20):
|
| 262 |
-
uscs = "SM"; uscs_expl = "Silty sand (fines are low-plasticity silt)."
|
| 263 |
-
elif PI > 7 and PI > 0.73 * (LL - 20):
|
| 264 |
-
uscs = "SC"; uscs_expl = "Clayey sand (clayey fines present; higher plasticity)."
|
| 265 |
-
else:
|
| 266 |
-
uscs = "SM-SC"; uscs_expl = "Transition between silty sand and clayey sand."
|
| 267 |
else:
|
| 268 |
-
|
| 269 |
-
nDS = int(inputs.get("nDS",5))
|
| 270 |
-
nDIL = int(inputs.get("nDIL",6))
|
| 271 |
-
nTG = int(inputs.get("nTG",6))
|
| 272 |
|
| 273 |
-
|
| 274 |
-
|
| 275 |
-
|
| 276 |
-
|
| 277 |
-
|
| 278 |
-
|
| 279 |
-
else:
|
| 280 |
-
uscs = "ML-OL"; uscs_expl = "Mixed silt/organic silt."
|
| 281 |
-
elif 10 <= LL <= 30 and 4 <= PI <= 7 and PI > 0.72 * (LL - 20):
|
| 282 |
-
if nDS == 1 or nDIL == 1 or nTG == 1:
|
| 283 |
-
uscs = "ML"; uscs_expl = "Silt."
|
| 284 |
-
elif nDS == 2 or nDIL == 2 or nTG == 2:
|
| 285 |
-
uscs = "CL"; uscs_expl = "Clay (low plasticity)."
|
| 286 |
-
else:
|
| 287 |
-
uscs = "ML-CL"; uscs_expl = "Mixed silt/clay."
|
| 288 |
-
else:
|
| 289 |
-
uscs = "CL"; uscs_expl = "Clay (low plasticity)."
|
| 290 |
-
else:
|
| 291 |
-
if PI < 0.73 * (LL - 20):
|
| 292 |
-
if nDS == 3 or nDIL == 4 or nTG == 4:
|
| 293 |
-
uscs = "MH"; uscs_expl = "Silt (high plasticity)"
|
| 294 |
-
elif nDS == 2 or nDIL == 2 or nTG == 4:
|
| 295 |
-
uscs = "OH"; uscs_expl = "Organic silt/clay (high plasticity)"
|
| 296 |
-
else:
|
| 297 |
-
uscs = "MH-OH"; uscs_expl = "Mixed high-plasticity silt/organic"
|
| 298 |
-
else:
|
| 299 |
-
uscs = "CH"; uscs_expl = "Clay (high plasticity)"
|
| 300 |
-
|
| 301 |
-
# AASHTO logic (verbatim)
|
| 302 |
-
if P2 <= 35:
|
| 303 |
-
if P2 <= 15 and P4 <= 30 and PI <= 6:
|
| 304 |
-
aashto = "A-1-a"
|
| 305 |
-
elif P2 <= 25 and P4 <= 50 and PI <= 6:
|
| 306 |
-
aashto = "A-1-b"
|
| 307 |
-
elif P2 <= 35 and P4 > 0:
|
| 308 |
-
if LL <= 40 and PI <= 10:
|
| 309 |
-
aashto = "A-2-4"
|
| 310 |
-
elif LL >= 41 and PI <= 10:
|
| 311 |
-
aashto = "A-2-5"
|
| 312 |
-
elif LL <= 40 and PI >= 11:
|
| 313 |
-
aashto = "A-2-6"
|
| 314 |
-
elif LL >= 41 and PI >= 11:
|
| 315 |
-
aashto = "A-2-7"
|
| 316 |
-
else:
|
| 317 |
-
aashto = "A-2"
|
| 318 |
-
else:
|
| 319 |
-
aashto = "A-3"
|
| 320 |
-
else:
|
| 321 |
-
if LL <= 40 and PI <= 10:
|
| 322 |
-
aashto = "A-4"
|
| 323 |
-
elif LL >= 41 and PI <= 10:
|
| 324 |
-
aashto = "A-5"
|
| 325 |
-
elif LL <= 40 and PI >= 11:
|
| 326 |
-
aashto = "A-6"
|
| 327 |
-
else:
|
| 328 |
-
aashto = "A-7-5" if PI <= (LL - 30) else "A-7-6"
|
| 329 |
-
|
| 330 |
-
# Group Index (GI)
|
| 331 |
-
a = P2 - 35; a = 0 if a < 0 else (40 if a > 40 else a)
|
| 332 |
-
b = P2 - 15; b = 0 if b < 0 else (40 if b > 40 else b)
|
| 333 |
-
c = LL - 40; c = 0 if c < 0 else (20 if c > 20 else c)
|
| 334 |
-
d = PI - 10; d = 0 if d < 0 else (20 if d > 20 else d)
|
| 335 |
-
GI = floor(0.2*a + 0.005*a*c + 0.01*b*d)
|
| 336 |
-
|
| 337 |
-
aashto_expl = f"{aashto} (Group Index = {GI})"
|
| 338 |
-
|
| 339 |
-
# Characteristics summary selection
|
| 340 |
-
char_summary = {}
|
| 341 |
-
if uscs.startswith("G") or uscs.startswith("S"):
|
| 342 |
-
char_summary = ENGINEERING_CHARACTERISTICS.get("Coarse sand") or ENGINEERING_CHARACTERISTICS.get("Gravel")
|
| 343 |
-
elif uscs.startswith(("M","C","O","H")):
|
| 344 |
-
char_summary = ENGINEERING_CHARACTERISTICS.get("Silt")
|
| 345 |
-
else:
|
| 346 |
-
char_summary = {"Notes":"Engineering properties need site-specific testing."}
|
| 347 |
-
|
| 348 |
-
# Compose natural text
|
| 349 |
-
result_lines = []
|
| 350 |
-
result_lines.append(f"According to USCS, the soil is **{uscs}** — {uscs_expl}")
|
| 351 |
-
result_lines.append(f"According to AASHTO, the soil is **{aashto_expl}**.")
|
| 352 |
-
result_lines.append("Engineering characteristics summary:")
|
| 353 |
-
for k,v in char_summary.items():
|
| 354 |
-
result_lines.append(f"- {k}: {v}")
|
| 355 |
|
| 356 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 357 |
|
| 358 |
-
#
|
| 359 |
-
|
| 360 |
-
def compute_and_plot_gsd(diams: List[float], passing: List[float]) -> Dict[str, Any]:
    """Compute grain-size-distribution metrics and plot the GSD curve.

    Parameters
    ----------
    diams : particle diameters in mm, paired index-wise with *passing*
        (any order — pairs are sorted together here).
    passing : percent passing for each diameter; values are clamped to 0–100.

    Returns
    -------
    dict with characteristic diameters "D10"/"D30"/"D60" (mm), uniformity
    coefficient "Cu" and curvature coefficient "Cc" (None when undefined,
    e.g. D10 == 0), and the matplotlib figure under "fig".
    """
    import math  # hoisted: the original re-imported math inside the loop

    # BUG FIX: the original sorted `diams` descending but left `passing`
    # untouched, silently mismatching each diameter with the wrong percent
    # for unsorted input. Sort the PAIRS together by diameter descending —
    # a no-op for already-descending input, so existing callers see
    # identical results.
    pairs = sorted(
        ((float(dia), max(0.0, min(100.0, float(pct)))) for dia, pct in zip(diams, passing)),
        key=lambda dp: dp[0],
        reverse=True,
    )
    d = [dp[0] for dp in pairs]
    p = [dp[1] for dp in pairs]

    def interp_D(percent):
        """Diameter at which *percent* of the sample passes (clamped to the data range)."""
        if percent <= p[-1]:
            return d[-1]
        if percent >= p[0]:
            return d[0]
        for i in range(len(p) - 1):
            if p[i] >= percent >= p[i + 1]:
                x0, x1 = p[i], p[i + 1]
                y0, y1 = d[i], d[i + 1]
                t = (percent - x0) / (x1 - x0) if x1 != x0 else 0
                # Interpolate in log-diameter space when both diameters are
                # positive — GSD curves are conventionally log-scaled, so this
                # is more accurate than linear interpolation.
                if y0 > 0 and y1 > 0:
                    return math.exp(math.log(y0) + t * (math.log(y1) - math.log(y0)))
                return y0 + t * (y1 - y0)
        return d[-1]

    D10 = interp_D(10)
    D30 = interp_D(30)
    D60 = interp_D(60)
    Cu = (D60 / D10) if D10 > 0 else None
    Cc = ((D30 ** 2) / (D10 * D60)) if (D10 > 0 and D60 > 0) else None

    fig = plt.figure(figsize=(6, 3))
    plt.semilogx(d, p, marker='o')
    plt.gca().invert_xaxis()
    plt.xlabel("Particle diameter (mm) [log scale]")
    plt.ylabel("% Passing")
    plt.title("GSD Curve")
    plt.grid(True, which="both", ls="--", alpha=0.4)
    plt.tight_layout()

    return {"D10": D10, "D30": D30, "D60": D60, "Cu": Cu, "Cc": Cc, "fig": fig}
|
| 404 |
-
|
| 405 |
-
# 11. OCR helper (pytesseract) — optional
def ocr_extract_image(img_file) -> Dict[str, Any]:
    """OCR an uploaded image and extract numeric tokens from the text.

    Returns {"text": str, "numbers": [str, ...]} on success, otherwise
    {"error": str}. Never returns None, so callers can always treat the
    result as a dict.
    """
    try:
        import pytesseract
    except Exception:
        # BUG FIX: the original returned bare None here, contradicting the
        # Dict return annotation and the error shape of the branch below.
        return {"error": "pytesseract is not installed"}
    try:
        img = Image.open(img_file)
        text = pytesseract.image_to_string(img)
        # Pull integer/decimal tokens — candidate LL, PL and sieve percentages.
        import re
        nums = re.findall(r"[-+]?\d*\.\d+|\d+", text)
        return {"text": text, "numbers": nums}
    except Exception as e:
        return {"error": str(e)}
|
| 421 |
|
| 422 |
-
#
|
| 423 |
def sidebar_ui():
|
| 424 |
-
st.sidebar
|
| 425 |
-
|
| 426 |
-
|
| 427 |
-
|
| 428 |
-
|
| 429 |
-
|
| 430 |
-
|
| 431 |
-
|
| 432 |
-
|
| 433 |
-
|
| 434 |
-
|
| 435 |
-
|
| 436 |
-
|
| 437 |
-
|
| 438 |
-
|
| 439 |
-
|
| 440 |
-
|
| 441 |
-
|
| 442 |
-
|
| 443 |
-
|
| 444 |
-
|
| 445 |
-
|
| 446 |
-
|
| 447 |
-
|
| 448 |
-
|
| 449 |
-
|
| 450 |
-
"Site
|
| 451 |
-
|
| 452 |
-
"
|
| 453 |
-
|
| 454 |
-
|
| 455 |
-
|
| 456 |
-
|
| 457 |
-
|
| 458 |
-
|
| 459 |
-
|
| 460 |
-
|
| 461 |
-
|
| 462 |
-
|
| 463 |
-
|
| 464 |
-
|
| 465 |
-
|
| 466 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 467 |
def landing_ui():
|
| 468 |
-
st.markdown(
|
| 469 |
-
|
| 470 |
-
|
| 471 |
-
|
| 472 |
-
|
| 473 |
-
|
| 474 |
-
|
| 475 |
-
|
| 476 |
-
|
| 477 |
-
|
| 478 |
-
|
| 479 |
-
|
| 480 |
-
|
| 481 |
-
|
| 482 |
-
|
| 483 |
-
|
| 484 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 485 |
st.markdown("---")
|
| 486 |
-
st.
|
| 487 |
-
st.write(f"Sites configured: **{len(ss['sites'])}**")
|
| 488 |
-
st.write("Active site:")
|
| 489 |
-
st.write(get_active_site()["Site Name"])
|
| 490 |
|
| 491 |
-
#
|
| 492 |
-
|
| 493 |
-
|
| 494 |
-
|
| 495 |
-
img = st.file_uploader("Upload soil image", type=["png","jpg","jpeg"])
|
| 496 |
-
if img:
|
| 497 |
-
st.image(img, use_column_width=True)
|
| 498 |
-
st.success("Image received. (Model inference stub).")
|
| 499 |
-
# placeholder: run a stub classifier
|
| 500 |
-
st.markdown("**Predicted soil class (stub):** Silty clay (SC) — confidence: 0.72")
|
| 501 |
-
|
| 502 |
-
# 15. Soil Classifier — conversational wizard (chat style)
|
| 503 |
-
def soil_classifier_ui():
|
| 504 |
-
st.header("📋 Soil Classifier (Chat style)")
|
| 505 |
|
| 506 |
-
|
| 507 |
-
|
| 508 |
-
|
| 509 |
-
|
| 510 |
-
site["classifier_inputs"] = {"opt":"n","P2":0.0,"P4":0.0,"D60":0.0,"D30":0.0,"D10":0.0,"LL":0.0,"PL":0.0,"nDS":5,"nDIL":6,"nTG":6}
|
| 511 |
|
| 512 |
-
|
| 513 |
-
|
|
|
|
|
|
|
| 514 |
|
| 515 |
-
|
| 516 |
-
|
|
|
|
|
|
|
| 517 |
|
| 518 |
-
|
| 519 |
-
|
|
|
|
|
|
|
| 520 |
|
| 521 |
-
|
| 522 |
-
|
| 523 |
-
|
| 524 |
-
|
| 525 |
-
site["classifier_step"] = 1
|
| 526 |
-
st.rerun()
|
| 527 |
-
return
|
| 528 |
|
| 529 |
-
|
| 530 |
-
|
| 531 |
-
|
| 532 |
-
|
| 533 |
-
ci["opt"] = "n"
|
| 534 |
-
site["classifier_step"] = 2
|
| 535 |
-
st.rerun()
|
| 536 |
-
if col2.button("Yes"):
|
| 537 |
-
ci["opt"] = "y"
|
| 538 |
-
# If organic, we can classify as Pt and stop early
|
| 539 |
-
site["USCS"]="Pt"
|
| 540 |
-
site["AASHTO"]="Organic (special handling)"
|
| 541 |
-
site["classifier_decision_path"]="Organic branch (Pt)"
|
| 542 |
-
st.success("Soil marked as organic (Pt). Classification saved.")
|
| 543 |
-
return
|
| 544 |
-
|
| 545 |
-
if step == 2:
|
| 546 |
-
bot("Please enter the percentage passing the #200 sieve (0.075 mm). Example: 12")
|
| 547 |
-
val = st.number_input("Percentage passing #200", min_value=0.0, max_value=100.0, value=float(ci.get("P2",0.0)), step=1.0, format="%.2f", key=f"p2_{site_idx}")
|
| 548 |
-
if st.button("Confirm P2"):
|
| 549 |
-
ci["P2"]=float(val)
|
| 550 |
-
site["classifier_step"]=3
|
| 551 |
-
st.rerun()
|
| 552 |
-
return
|
| 553 |
|
| 554 |
-
|
| 555 |
-
|
| 556 |
-
|
| 557 |
-
|
| 558 |
-
|
| 559 |
-
|
| 560 |
-
|
| 561 |
-
|
| 562 |
-
|
| 563 |
-
|
| 564 |
-
|
| 565 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 566 |
|
| 567 |
-
|
| 568 |
-
|
| 569 |
-
|
| 570 |
-
if col1.button("Yes — I'll enter them"):
|
| 571 |
-
site["classifier_step"]=5
|
| 572 |
-
st.rerun()
|
| 573 |
-
if col2.button("No — I'll provide GSD later"):
|
| 574 |
-
# keep zeros
|
| 575 |
-
site["classifier_step"]=6
|
| 576 |
-
st.rerun()
|
| 577 |
-
return
|
| 578 |
|
| 579 |
-
|
| 580 |
-
|
| 581 |
-
|
| 582 |
-
|
| 583 |
-
|
| 584 |
-
|
| 585 |
-
|
| 586 |
-
|
| 587 |
-
|
| 588 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 589 |
|
| 590 |
-
|
| 591 |
-
bot("What is the Liquid Limit (LL)?")
|
| 592 |
-
LL = st.number_input("Liquid limit (LL)", min_value=0.0, max_value=200.0, value=float(ci.get("LL",0.0)), format="%.2f", key=f"ll_{site_idx}")
|
| 593 |
-
if st.button("Confirm LL"):
|
| 594 |
-
ci["LL"]=float(LL); site["classifier_step"]=7; st.rerun()
|
| 595 |
-
return
|
| 596 |
|
| 597 |
-
|
| 598 |
-
|
| 599 |
-
|
| 600 |
-
|
| 601 |
-
|
| 602 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 603 |
|
| 604 |
-
|
| 605 |
-
|
| 606 |
-
|
| 607 |
-
|
| 608 |
-
|
| 609 |
-
|
| 610 |
-
|
| 611 |
-
|
| 612 |
-
return
|
|
|
|
|
|
|
| 613 |
|
| 614 |
-
|
| 615 |
-
|
| 616 |
-
|
| 617 |
-
sel = st.selectbox("Dilatancy", options=dil_options, index=0, key=f"dil_{site_idx}")
|
| 618 |
-
dil_map = {"Quick to slow":1,"None to very slow":2,"Slow":3,"Slow to none":4,"None":5,"Null?":6}
|
| 619 |
-
if st.button("Confirm dilatancy"):
|
| 620 |
-
ci["nDIL"]=dil_map[sel]; site["classifier_step"]=10; st.rerun()
|
| 621 |
-
return
|
| 622 |
|
| 623 |
-
if
|
| 624 |
-
|
| 625 |
-
|
| 626 |
-
|
| 627 |
-
|
| 628 |
-
|
| 629 |
-
|
| 630 |
-
|
| 631 |
-
|
| 632 |
-
|
| 633 |
-
|
| 634 |
-
|
| 635 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 636 |
st.rerun()
|
| 637 |
-
return
|
| 638 |
|
| 639 |
-
if
|
| 640 |
-
st.
|
| 641 |
-
|
| 642 |
-
st.markdown(site.get("classifier_decision_path","No decision path recorded"))
|
| 643 |
-
if st.button("Export classification PDF"):
|
| 644 |
-
fn = export_classification_pdf(site)
|
| 645 |
-
with open(fn,"rb") as f:
|
| 646 |
-
st.download_button("Download classification PDF", f, file_name=fn, mime="application/pdf")
|
| 647 |
-
# allow restart
|
| 648 |
-
if st.button("🔁 Start new classification"):
|
| 649 |
-
site["classifier_step"]=1
|
| 650 |
-
site["classifier_inputs"] = {"opt":"n","P2":0.0,"P4":0.0,"D60":0.0,"D30":0.0,"D10":0.0,"LL":0.0,"PL":0.0,"nDS":5,"nDIL":6,"nTG":6}
|
| 651 |
-
st.rerun()
|
| 652 |
-
return
|
| 653 |
|
| 654 |
-
#
|
|
|
|
|
|
|
| 655 |
def gsd_curve_ui():
|
| 656 |
-
st.header("
|
| 657 |
site = get_active_site()
|
| 658 |
-
|
| 659 |
-
|
| 660 |
-
|
| 661 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 662 |
try:
|
| 663 |
-
|
| 664 |
-
passing = [
|
| 665 |
-
|
| 666 |
-
fig = out["fig"]
|
| 667 |
-
st.pyplot(fig)
|
| 668 |
-
# store values
|
| 669 |
-
site["GSD"] = {"diameters":diams, "passing":passing, "D10":out["D10"], "D30":out["D30"], "D60":out["D60"], "Cu":out["Cu"], "Cc":out["Cc"]}
|
| 670 |
-
st.success(f"Saved GSD: D10={out['D10']:.4g}, D30={out['D30']:.4g}, D60={out['D60']:.4g}")
|
| 671 |
except Exception as e:
|
| 672 |
-
st.error(f"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 673 |
|
| 674 |
-
|
| 675 |
-
|
| 676 |
-
|
| 677 |
-
|
| 678 |
-
|
| 679 |
-
EE_AVAILABLE = False
|
| 680 |
try:
|
| 681 |
-
|
| 682 |
-
|
| 683 |
-
|
| 684 |
-
|
| 685 |
-
|
| 686 |
-
|
| 687 |
-
|
| 688 |
-
|
| 689 |
-
|
| 690 |
-
|
| 691 |
-
|
| 692 |
-
|
| 693 |
-
|
| 694 |
-
|
| 695 |
-
|
| 696 |
-
|
| 697 |
-
|
| 698 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 699 |
except Exception as e:
|
| 700 |
-
|
| 701 |
|
| 702 |
-
|
| 703 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 704 |
try:
|
| 705 |
-
|
| 706 |
-
|
| 707 |
-
|
| 708 |
-
m.to_streamlit(height=500)
|
| 709 |
-
st.markdown("Use drawing tools to mark AOI then press Extract Data.")
|
| 710 |
-
if st.button("Extract Data from Earth Engine"):
|
| 711 |
-
# Here we provide STUBS and indicate where to replace with real queries
|
| 712 |
-
# Example: sample elevation from SRTM, fetch CHIRPS rainfall, seismic catalogs, flood layers...
|
| 713 |
-
try:
|
| 714 |
-
# STUB: Do some EE queries here to populate site dict.
|
| 715 |
-
site["Soil Profile"] = "Colluvial soils over weathered dolomite (EE sample)"
|
| 716 |
-
site["Flood Data"] = "No 20-year flood flagged (EE CHIRPS proxy)"
|
| 717 |
-
site["Seismic Data"] = "Historic PGA moderate (EE seismic catalog proxy)"
|
| 718 |
-
site["Topography"] = "Gentle slope; elevation approx. 250m (SRTM)"
|
| 719 |
-
st.success("Data extracted (stub). Replace STUB with real EE queries.")
|
| 720 |
-
except Exception as e:
|
| 721 |
-
st.error(f"Extraction failed: {e}")
|
| 722 |
except Exception as e:
|
| 723 |
-
st.error(f"Map
|
| 724 |
-
|
| 725 |
-
|
| 726 |
-
|
| 727 |
-
|
| 728 |
-
|
| 729 |
-
|
| 730 |
-
|
| 731 |
-
|
| 732 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 733 |
def rag_ui():
|
| 734 |
-
st.header("🤖 GeoMate Ask
|
| 735 |
site = get_active_site()
|
| 736 |
-
|
| 737 |
-
if not
|
| 738 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 739 |
if uploaded:
|
| 740 |
-
|
| 741 |
-
|
| 742 |
-
|
| 743 |
-
|
| 744 |
-
|
| 745 |
-
|
| 746 |
-
|
| 747 |
-
|
| 748 |
-
|
| 749 |
-
|
| 750 |
-
for
|
| 751 |
-
role =
|
| 752 |
-
|
| 753 |
-
|
|
|
|
| 754 |
else:
|
| 755 |
-
st.markdown(f"<div
|
| 756 |
-
|
| 757 |
-
|
| 758 |
-
|
| 759 |
-
|
| 760 |
-
|
|
|
|
| 761 |
else:
|
| 762 |
-
#
|
| 763 |
-
site
|
| 764 |
-
|
| 765 |
-
#
|
| 766 |
-
|
| 767 |
-
|
| 768 |
-
|
| 769 |
-
|
| 770 |
-
|
| 771 |
-
|
| 772 |
-
|
| 773 |
-
|
| 774 |
-
|
| 775 |
-
|
| 776 |
-
|
| 777 |
-
|
| 778 |
-
|
| 779 |
-
|
| 780 |
-
|
| 781 |
-
|
| 782 |
-
|
| 783 |
-
|
| 784 |
-
|
| 785 |
-
|
| 786 |
-
|
| 787 |
-
|
| 788 |
-
|
| 789 |
-
|
| 790 |
-
|
| 791 |
-
|
| 792 |
-
|
| 793 |
-
|
| 794 |
-
|
| 795 |
-
|
| 796 |
-
|
| 797 |
-
|
| 798 |
-
|
| 799 |
-
|
| 800 |
-
|
| 801 |
-
|
| 802 |
-
site
|
| 803 |
-
|
| 804 |
-
|
| 805 |
-
|
| 806 |
-
|
| 807 |
-
|
| 808 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 809 |
else:
|
| 810 |
-
|
| 811 |
-
st.write(site.get("classifier_decision_path"))
|
| 812 |
-
if st.button("Export Classification PDF"):
|
| 813 |
-
fn = export_classification_pdf(site)
|
| 814 |
-
with open(fn,"rb") as f:
|
| 815 |
-
st.download_button("Download Classification PDF", f, file_name=fn, mime="application/pdf")
|
| 816 |
|
| 817 |
-
|
| 818 |
-
|
| 819 |
-
|
| 820 |
-
|
| 821 |
-
|
| 822 |
-
|
| 823 |
-
|
| 824 |
-
|
| 825 |
-
|
| 826 |
-
|
| 827 |
-
|
| 828 |
-
|
| 829 |
-
|
| 830 |
-
|
| 831 |
-
|
| 832 |
-
|
| 833 |
-
|
| 834 |
-
|
| 835 |
-
|
| 836 |
-
|
| 837 |
-
|
| 838 |
-
|
| 839 |
-
|
| 840 |
-
|
| 841 |
-
|
| 842 |
-
|
| 843 |
-
|
| 844 |
-
|
| 845 |
-
|
| 846 |
-
|
| 847 |
-
|
| 848 |
-
|
| 849 |
-
|
| 850 |
-
|
| 851 |
-
|
| 852 |
-
|
| 853 |
-
|
| 854 |
-
|
| 855 |
-
|
| 856 |
-
|
| 857 |
-
|
| 858 |
-
|
| 859 |
-
|
| 860 |
-
|
| 861 |
-
|
| 862 |
-
|
| 863 |
-
|
| 864 |
-
|
| 865 |
-
|
| 866 |
-
|
| 867 |
-
|
| 868 |
-
|
| 869 |
-
|
| 870 |
-
|
| 871 |
-
|
| 872 |
-
|
| 873 |
-
|
| 874 |
-
|
| 875 |
-
|
| 876 |
-
|
| 877 |
-
|
| 878 |
-
|
| 879 |
-
|
| 880 |
-
elems.append(t)
|
| 881 |
-
doc.build(elems)
|
| 882 |
-
with open(fn,"wb") as f:
|
| 883 |
-
f.write(buf.getvalue())
|
| 884 |
-
return fn
|
| 885 |
-
except Exception:
|
| 886 |
-
pass
|
| 887 |
-
# fallback to FPDF
|
| 888 |
-
if 'FPDF' in globals() and FPDF is not None:
|
| 889 |
-
pdf = FPDF()
|
| 890 |
-
pdf.add_page()
|
| 891 |
-
pdf.set_font("Arial","B",16)
|
| 892 |
-
pdf.cell(0,10,"GeoMate Soil Classification Report",ln=True,align='C')
|
| 893 |
-
pdf.set_font("Arial","",12)
|
| 894 |
-
pdf.ln(6)
|
| 895 |
-
pdf.multi_cell(0,8, f"Site: {site.get('Site Name')}\nDate: {datetime.utcnow().strftime('%Y-%m-%d')}")
|
| 896 |
-
pdf.ln(4)
|
| 897 |
-
pdf.multi_cell(0,8, "Classification Result:")
|
| 898 |
-
pdf.multi_cell(0,8, site.get("classifier_decision_path","Not available"))
|
| 899 |
-
# inputs
|
| 900 |
-
pdf.ln(4)
|
| 901 |
-
pdf.multi_cell(0,8,"Inputs:")
|
| 902 |
-
for k,v in site.get("classifier_inputs",{}).items():
|
| 903 |
-
pdf.cell(0,6,f"- {k}: {v}", ln=True)
|
| 904 |
-
pdf.output(fn)
|
| 905 |
-
return fn
|
| 906 |
-
# last fallback: write text file
|
| 907 |
-
with open(fn.replace(".pdf",".txt"), "w") as f:
|
| 908 |
-
f.write(site.get("classifier_decision_path","Not available"))
|
| 909 |
-
return fn
|
| 910 |
-
|
| 911 |
-
def export_full_geotech_pdf(site: Dict[str,Any]) -> str:
|
| 912 |
-
fn = f"{site.get('Site Name','site')}_FullGeotech_{now_str()}.pdf"
|
| 913 |
-
# Build a detailed PDF using ReportLab if possible
|
| 914 |
-
if 'reportlab' in globals():
|
| 915 |
-
try:
|
| 916 |
-
buf = io.BytesIO()
|
| 917 |
-
doc = SimpleDocTemplate(buf, pagesize=A4, leftMargin=18*mm, rightMargin=18*mm, topMargin=18*mm, bottomMargin=18*mm)
|
| 918 |
-
styles = getSampleStyleSheet()
|
| 919 |
-
elems=[]
|
| 920 |
-
title_style = ParagraphStyle("title", parent=styles["Title"], fontSize=18, textColor=colors.HexColor("#FF7A00"))
|
| 921 |
-
elems.append(Paragraph("Full Geotechnical Investigation Report", title_style))
|
| 922 |
-
elems.append(Spacer(1,8))
|
| 923 |
-
elems.append(Paragraph(f"Site: {site.get('Site Name','-')} • Date: {datetime.utcnow().strftime('%Y-%m-%d')}", styles["Normal"]))
|
| 924 |
-
elems.append(Spacer(1,10))
|
| 925 |
-
# 1. Project & Site
|
| 926 |
-
elems.append(Paragraph("1.0 Project & Site Information", styles["Heading2"]))
|
| 927 |
-
elems.append(Paragraph(f"Location: {site.get('Site Coordinates','Not provided')}", styles["BodyText"]))
|
| 928 |
-
elems.append(Spacer(1,8))
|
| 929 |
-
# 2. Field investigation
|
| 930 |
-
elems.append(Paragraph("2.0 Field Investigation & Observations", styles["Heading2"]))
|
| 931 |
-
fi_text = f"Soil Profile: {site.get('Soil Profile','Not provided')}\nFlood Data: {site.get('Flood Data','Not provided')}\nSeismic Data: {site.get('Seismic Data','Not provided')}\nTopography: {site.get('Topography','Not provided')}"
|
| 932 |
-
elems.append(Paragraph(fi_text.replace("\n","<br/>"), styles["BodyText"]))
|
| 933 |
-
elems.append(Spacer(1,8))
|
| 934 |
-
# 3. Lab results: show GSD table if available
|
| 935 |
-
elems.append(Paragraph("3.0 Laboratory Testing", styles["Heading2"]))
|
| 936 |
-
if site.get("GSD"):
|
| 937 |
-
g = site.get("GSD")
|
| 938 |
-
elems.append(Paragraph(f"GSD D10={g.get('D10')}, D30={g.get('D30')}, D60={g.get('D60')}, Cu={g.get('Cu')}, Cc={g.get('Cc')}", styles["BodyText"]))
|
| 939 |
else:
|
| 940 |
-
|
| 941 |
-
|
| 942 |
-
|
| 943 |
-
|
| 944 |
-
|
| 945 |
-
|
| 946 |
-
|
| 947 |
-
|
| 948 |
-
|
| 949 |
-
|
| 950 |
-
|
| 951 |
-
|
| 952 |
-
|
| 953 |
-
|
| 954 |
-
|
| 955 |
-
|
| 956 |
-
|
| 957 |
-
|
| 958 |
-
|
| 959 |
-
|
| 960 |
-
|
| 961 |
-
|
| 962 |
-
|
| 963 |
-
|
| 964 |
-
|
| 965 |
-
|
| 966 |
-
|
| 967 |
-
if k in ["classifier_inputs","chat_history","classifier_decision_path"]:
|
| 968 |
-
continue
|
| 969 |
-
pdf.multi_cell(0,7,f"{k}: {v if v else 'Not Provided'}")
|
| 970 |
-
pdf.output(fn)
|
| 971 |
-
return fn
|
| 972 |
-
# else return text fallback
|
| 973 |
-
fn_txt = fn.replace(".pdf",".txt")
|
| 974 |
-
with open(fn_txt,"w") as f:
|
| 975 |
-
for k,v in site.items():
|
| 976 |
-
f.write(f"{k}: {v}\n")
|
| 977 |
-
return fn_txt
|
| 978 |
-
|
| 979 |
-
def export_dummy_report(site:Dict[str,Any]) -> str:
|
| 980 |
-
fn = f"{site.get('Site Name','site')}_Dummy_{now_str()}.pdf"
|
| 981 |
-
if 'FPDF' in globals() and FPDF is not None:
|
| 982 |
-
pdf = FPDF()
|
| 983 |
-
pdf.add_page()
|
| 984 |
-
pdf.set_font("Arial","B",18)
|
| 985 |
-
pdf.cell(0,10,"GeoMate — Dummy Geotechnical Report", ln=True, align="C")
|
| 986 |
-
pdf.ln(8)
|
| 987 |
-
pdf.set_font("Arial","",12)
|
| 988 |
-
pdf.multi_cell(0,8,"This dummy report is for layout testing. The final report will be more comprehensive and include charts, maps and tables.")
|
| 989 |
-
pdf.ln(6)
|
| 990 |
-
pdf.multi_cell(0,8,"Sample Conclusions:\n- Site is underlain by colluvial soils.\n- Recommended foundation: raft or piles depending on load.\n- Further testing (CPT, triaxial) recommended.")
|
| 991 |
-
pdf.output(fn)
|
| 992 |
-
return fn
|
| 993 |
else:
|
| 994 |
-
|
| 995 |
-
fn_txt = fn.replace(".pdf",".txt")
|
| 996 |
-
with open(fn_txt,"w") as f:
|
| 997 |
-
f.write("Dummy report (text fallback)\n")
|
| 998 |
-
return fn_txt
|
| 999 |
-
|
| 1000 |
-
# 21. Main page router
|
| 1001 |
-
PAGES = {
|
| 1002 |
-
"Landing": landing_ui,
|
| 1003 |
-
"Soil Recognizer": soil_recognizer_ui,
|
| 1004 |
-
"Soil Classifier": soil_classifier_ui,
|
| 1005 |
-
"GSD Curve": gsd_curve_ui,
|
| 1006 |
-
"Locator": locator_ui,
|
| 1007 |
-
"GeoMate Ask": rag_ui,
|
| 1008 |
-
"Reports": reports_ui
|
| 1009 |
-
}
|
| 1010 |
-
|
| 1011 |
-
def main():
|
| 1012 |
-
sidebar_ui()
|
| 1013 |
-
# top-level nav (use session page)
|
| 1014 |
-
if "active_page" not in ss:
|
| 1015 |
-
ss["active_page"]="Landing"
|
| 1016 |
-
# small nav bar at top
|
| 1017 |
-
cols = st.columns([1,3,1])
|
| 1018 |
-
with cols[1]:
|
| 1019 |
-
choice = st.selectbox("Open Page", options=list(PAGES.keys()), index=list(PAGES.keys()).index(ss["active_page"]))
|
| 1020 |
-
ss["active_page"] = choice
|
| 1021 |
-
|
| 1022 |
-
# call page function
|
| 1023 |
-
try:
|
| 1024 |
-
page_func = PAGES.get(ss["active_page"], landing_ui)
|
| 1025 |
-
page_func()
|
| 1026 |
-
except Exception as e:
|
| 1027 |
-
st.error(f"Page error: {e}\n{traceback.format_exc()}")
|
| 1028 |
|
|
|
|
| 1029 |
if __name__ == "__main__":
|
| 1030 |
-
|
|
|
|
|
|
|
|
|
| 1 |
+
# Part 1/4 of GeoMate V2 app.py
|
| 2 |
+
# -------------------------------------------------------
|
| 3 |
+
# Top: Streamlit config and imports.
|
| 4 |
+
# This part contains:
|
| 5 |
+
# - page config
|
| 6 |
+
# - imports
|
| 7 |
+
# - secrets checks
|
| 8 |
+
# - session_state initialization
|
| 9 |
+
# - sidebar + landing UI + core helpers
|
| 10 |
+
# - page function stubs (detailed implementations follow in Part 2-4)
|
| 11 |
+
# -------------------------------------------------------
|
| 12 |
+
|
| 13 |
+
# NOTE: paste Part1, then Part2, Part3, Part4 in order into a single app.py
|
| 14 |
+
|
| 15 |
+
# IMPORTANT: set_page_config must be the first Streamlit command
|
| 16 |
import streamlit as st
|
| 17 |
st.set_page_config(page_title="GeoMate V2", page_icon="🌍", layout="wide", initial_sidebar_state="expanded")
|
| 18 |
|
| 19 |
+
# Standard imports
|
| 20 |
+
import os
|
| 21 |
+
import io
|
| 22 |
+
import json
|
| 23 |
+
import time
|
| 24 |
+
import math
|
| 25 |
+
import base64
|
| 26 |
+
import textwrap
|
| 27 |
+
from typing import Any, Dict, List, Optional, Tuple
|
| 28 |
+
|
| 29 |
+
# Third-party imports (ensure in requirements.txt)
|
| 30 |
+
from streamlit_option_menu import option_menu
|
| 31 |
+
import matplotlib.pyplot as plt
|
| 32 |
|
| 33 |
+
# Attempt imports for optional integrations. If missing, app will show instruction in UI.
|
| 34 |
try:
|
| 35 |
import faiss
|
| 36 |
except Exception:
|
| 37 |
faiss = None
|
| 38 |
|
| 39 |
try:
|
| 40 |
+
import reportlab
|
| 41 |
+
from reportlab.lib import colors
|
| 42 |
+
from reportlab.lib.pagesizes import A4
|
| 43 |
+
from reportlab.lib.units import mm
|
| 44 |
+
from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, Table, TableStyle, PageBreak, Flowable
|
| 45 |
+
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
|
| 46 |
except Exception:
|
| 47 |
+
reportlab = None
|
| 48 |
|
| 49 |
try:
|
|
|
|
| 50 |
import geemap
|
| 51 |
+
import ee
|
| 52 |
except Exception:
|
|
|
|
| 53 |
geemap = None
|
| 54 |
+
ee = None
|
| 55 |
|
| 56 |
try:
|
| 57 |
+
import easyocr
|
|
|
|
|
|
|
|
|
|
|
|
|
| 58 |
except Exception:
|
| 59 |
+
easyocr = None
|
| 60 |
+
|
| 61 |
+
# For Groq client - if not installed it will be None and UI will show an instructive error
|
| 62 |
+
try:
|
| 63 |
+
from groq import Groq
|
| 64 |
+
except Exception:
|
| 65 |
+
Groq = None
|
| 66 |
+
|
| 67 |
+
# For sentence-transformers if used locally for embedding fallback
|
| 68 |
+
try:
|
| 69 |
+
from sentence_transformers import SentenceTransformer
|
| 70 |
+
except Exception:
|
| 71 |
+
SentenceTransformer = None
|
| 72 |
+
|
| 73 |
+
# --- Secrets and environment handling ---
|
| 74 |
+
# Hugging Face Spaces: secrets can be stored in Secrets and accessed via os.environ or st.secrets
|
| 75 |
+
def _get_env_secret(key: str) -> Optional[str]:
|
| 76 |
+
# Try environment first, then st.secrets
|
| 77 |
+
val = os.environ.get(key)
|
| 78 |
+
if val:
|
| 79 |
+
return val
|
| 80 |
try:
|
| 81 |
+
return st.secrets.get(key)
|
| 82 |
except Exception:
|
| 83 |
+
return None
|
| 84 |
|
| 85 |
+
# Required secret names (as requested)
|
| 86 |
+
REQUIRED_SECRETS = ["GROQ_API_KEY", "SERVICE_ACCOUNT", "EARTH_ENGINE_KEY"]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 87 |
|
| 88 |
+
missing = []
|
| 89 |
+
for sname in REQUIRED_SECRETS:
|
| 90 |
+
if not _get_env_secret(sname):
|
| 91 |
+
missing.append(sname)
|
|
|
|
| 92 |
|
| 93 |
+
# If secrets missing - show friendly error and halt
|
| 94 |
if missing:
|
| 95 |
+
st.markdown(
|
| 96 |
+
"""
|
| 97 |
+
<style>
|
| 98 |
+
.secret-error { background: #200; border-left: 6px solid #FF7A00; padding: 12px; border-radius:8px; }
|
| 99 |
+
</style>
|
| 100 |
+
""", unsafe_allow_html=True
|
| 101 |
+
)
|
| 102 |
+
st.error(
|
| 103 |
+
f"Missing required secrets: {', '.join(missing)}. "
|
| 104 |
+
"Please add them in your Hugging Face Space Secrets or in environment variables and restart the app."
|
| 105 |
+
)
|
| 106 |
st.stop()
|
| 107 |
|
| 108 |
+
# If we get here, secrets exist - read into variables
|
| 109 |
+
GROQ_API_KEY = _get_env_secret("GROQ_API_KEY")
|
| 110 |
+
SERVICE_ACCOUNT = _get_env_secret("SERVICE_ACCOUNT")
|
| 111 |
+
EARTH_ENGINE_KEY = _get_env_secret("EARTH_ENGINE_KEY") # expected to be JSON content or path
|
| 112 |
|
| 113 |
+
# Initialize Groq client (lazy) - we'll construct real client in RAG page when needed
|
| 114 |
+
def groq_client():
|
| 115 |
+
if Groq is None:
|
| 116 |
+
raise RuntimeError("groq package not installed. Add 'groq' to requirements.txt.")
|
| 117 |
+
return Groq(api_key=GROQ_API_KEY)
|
| 118 |
+
|
| 119 |
+
# --- Session state initialization ---
|
| 120 |
ss = st.session_state
|
| 121 |
+
|
| 122 |
+
# Initialize core session keys
|
| 123 |
+
if "page" not in ss:
|
| 124 |
+
ss.page = "Landing"
|
| 125 |
+
if "llm_model" not in ss:
|
| 126 |
+
# default model choices (user can change in sidebar)
|
| 127 |
+
ss.llm_model = "meta-llama/llama-4-maverick-17b-128e-instruct"
|
| 128 |
if "sites" not in ss:
|
| 129 |
+
# sites is a list of site dictionaries (max 4)
|
| 130 |
+
ss.sites = []
|
| 131 |
+
if "active_site_idx" not in ss:
|
| 132 |
+
ss.active_site_idx = 0
|
| 133 |
+
if "faiss_loaded" not in ss:
|
| 134 |
+
ss.faiss_loaded = False
|
| 135 |
+
if "faiss_index" not in ss:
|
| 136 |
+
ss.faiss_index = None
|
| 137 |
+
if "faiss_meta" not in ss:
|
| 138 |
+
ss.faiss_meta = None
|
| 139 |
+
|
| 140 |
+
# default styling variables
|
| 141 |
+
THEME = {
|
| 142 |
+
"bg": "#060606",
|
| 143 |
+
"panel": "#0b0b0b",
|
| 144 |
+
"accent": "#FF7A00",
|
| 145 |
+
"accent2": "#C62828",
|
| 146 |
+
"blue": "#1F4E79",
|
| 147 |
+
"muted": "#9aa7bf",
|
| 148 |
+
"bubble_bg": "#0f1724",
|
| 149 |
+
}
|
| 150 |
+
|
| 151 |
+
# helper: cap site count
|
| 152 |
+
MAX_SITES = 4
|
| 153 |
+
|
| 154 |
+
# --- Core data structure helpers ---
|
| 155 |
+
def new_empty_site(name: str = "Site") -> Dict[str, Any]:
|
| 156 |
+
"""Create a new site dict with all required fields pre-populated as None or sensible defaults."""
|
| 157 |
+
return {
|
| 158 |
+
"Site Name": name,
|
| 159 |
"Site Coordinates": "",
|
| 160 |
+
"lat": None,
|
| 161 |
+
"lon": None,
|
| 162 |
"Load Bearing Capacity": None,
|
| 163 |
"Skin Shear Strength": None,
|
| 164 |
"Relative Compaction": None,
|
|
|
|
| 174 |
"GI": None,
|
| 175 |
"classifier_inputs": {},
|
| 176 |
"classifier_decision_path": "",
|
| 177 |
+
"chat_history": [], # list of dicts {"role":"bot"|"user", "text": "..."}
|
| 178 |
"report_convo_state": 0,
|
| 179 |
"map_snapshot": None,
|
| 180 |
+
"ocr_pending": False,
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 181 |
}
|
|
|
|
| 182 |
|
| 183 |
+
def get_active_site() -> Dict[str, Any]:
|
| 184 |
+
"""Return the active site dict. If none exists, create one."""
|
| 185 |
+
if not ss.sites:
|
| 186 |
+
ss.sites = [new_empty_site("Home")]
|
| 187 |
+
ss.active_site_idx = 0
|
| 188 |
+
# clamp index
|
| 189 |
+
if ss.active_site_idx < 0:
|
| 190 |
+
ss.active_site_idx = 0
|
| 191 |
+
if ss.active_site_idx >= len(ss.sites):
|
| 192 |
+
ss.active_site_idx = max(0, len(ss.sites) - 1)
|
| 193 |
+
return ss.sites[ss.active_site_idx]
|
| 194 |
+
|
| 195 |
+
def save_active_site(site_dict: Dict[str, Any]):
|
| 196 |
+
"""Save the given dict into the active site slot."""
|
| 197 |
+
if not ss.sites:
|
| 198 |
+
ss.sites = [site_dict]
|
| 199 |
+
ss.active_site_idx = 0
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 200 |
else:
|
| 201 |
+
ss.sites[ss.active_site_idx] = site_dict
|
|
|
|
|
|
|
|
|
|
| 202 |
|
| 203 |
+
def add_site(name: str):
|
| 204 |
+
if len(ss.sites) >= MAX_SITES:
|
| 205 |
+
st.warning(f"Maximum of {MAX_SITES} sites reached.")
|
| 206 |
+
return
|
| 207 |
+
ss.sites.append(new_empty_site(name))
|
| 208 |
+
ss.active_site_idx = len(ss.sites) - 1
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 209 |
|
| 210 |
+
def remove_site(idx: int):
|
| 211 |
+
if idx < 0 or idx >= len(ss.sites):
|
| 212 |
+
return
|
| 213 |
+
ss.sites.pop(idx)
|
| 214 |
+
if ss.active_site_idx >= len(ss.sites):
|
| 215 |
+
ss.active_site_idx = max(0, len(ss.sites) - 1)
|
| 216 |
|
| 217 |
+
# small helper to pretty-print JSON for the site
|
| 218 |
+
def pretty_site_json(site: Dict[str, Any]) -> str:
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 219 |
try:
|
| 220 |
+
return json.dumps(site, indent=2, default=str)
|
|
|
|
| 221 |
except Exception:
|
| 222 |
+
return str(site)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 223 |
|
| 224 |
+
# --- Sidebar: model selection, site management, nav (option_menu) ---
|
| 225 |
def sidebar_ui():
|
| 226 |
+
with st.sidebar:
|
| 227 |
+
st.markdown(f"<h3 style='color:{THEME['accent']};margin:6px 0;'>GeoMate V2</h3>", unsafe_allow_html=True)
|
| 228 |
+
# LLM model selector
|
| 229 |
+
st.markdown("**Select LLM model**")
|
| 230 |
+
model_choice = st.selectbox(
|
| 231 |
+
"Model",
|
| 232 |
+
options=[
|
| 233 |
+
"meta-llama/llama-4-maverick-17b-128e-instruct",
|
| 234 |
+
"llama3-8b-8192",
|
| 235 |
+
"mixtral-8x7b-32768",
|
| 236 |
+
"gemma-7b-it"
|
| 237 |
+
],
|
| 238 |
+
index=0,
|
| 239 |
+
key="sidebar_model_select"
|
| 240 |
+
)
|
| 241 |
+
ss.llm_model = model_choice
|
| 242 |
+
|
| 243 |
+
st.markdown("---")
|
| 244 |
+
st.markdown("**Project Sites**")
|
| 245 |
+
# Site add/remove
|
| 246 |
+
colA, colB = st.columns([2,1])
|
| 247 |
+
with colA:
|
| 248 |
+
# unique key per render to avoid duplicate key error
|
| 249 |
+
new_site_name = st.text_input("New site name", key="new_site_name_input")
|
| 250 |
+
with colB:
|
| 251 |
+
if st.button("➕ Add", key="add_site_btn"):
|
| 252 |
+
name = new_site_name.strip() or f"Site {len(ss.sites)+1}"
|
| 253 |
+
add_site(name)
|
| 254 |
+
st.success(f"Added site: {name}")
|
| 255 |
+
st.rerun()
|
| 256 |
+
|
| 257 |
+
# list sites and active site selector
|
| 258 |
+
if ss.sites:
|
| 259 |
+
names = [s.get("Site Name", f"Site {i+1}") for i,s in enumerate(ss.sites)]
|
| 260 |
+
asel = st.selectbox("Active Site", options=names, index=ss.active_site_idx, key="active_site_select")
|
| 261 |
+
# map selectbox selection to index
|
| 262 |
+
ss.active_site_idx = names.index(asel)
|
| 263 |
+
# remove site button
|
| 264 |
+
if st.button("🗑️ Remove active site", key="remove_site_btn"):
|
| 265 |
+
idx = ss.active_site_idx
|
| 266 |
+
removed_name = ss.sites[idx].get("Site Name","Site")
|
| 267 |
+
remove_site(idx)
|
| 268 |
+
st.success(f"Removed site {removed_name}")
|
| 269 |
+
st.rerun()
|
| 270 |
+
else:
|
| 271 |
+
st.info("No sites yet. Add one above.")
|
| 272 |
+
|
| 273 |
+
st.markdown("---")
|
| 274 |
+
# expandable JSON viewer for active site
|
| 275 |
+
with st.expander("Show active site JSON"):
|
| 276 |
+
st.code(pretty_site_json(get_active_site()), language="json")
|
| 277 |
+
|
| 278 |
+
st.markdown("---")
|
| 279 |
+
# Navigation menu
|
| 280 |
+
pages = ["Landing", "Soil Recognizer", "Soil Classifier", "GSD Curve", "Locator", "GeoMate Ask", "Reports"]
|
| 281 |
+
icons = ["house", "image", "flask", "bar-chart", "geo-alt", "robot", "file-earmark-text"]
|
| 282 |
+
choice = option_menu(
|
| 283 |
+
menu_title=None,
|
| 284 |
+
options=pages,
|
| 285 |
+
icons=icons,
|
| 286 |
+
menu_icon="cast",
|
| 287 |
+
default_index=pages.index(ss.page) if ss.page in pages else 0,
|
| 288 |
+
orientation="vertical",
|
| 289 |
+
styles={
|
| 290 |
+
"container": {"padding": "6px", "background-color": THEME["panel"]},
|
| 291 |
+
"icon": {"color": THEME["accent"], "font-size": "18px"},
|
| 292 |
+
"nav-link": {"font-size": "14px", "text-align": "left", "margin":"4px"},
|
| 293 |
+
"nav-link-selected": {"background-color": THEME["accent"], "color": "white"},
|
| 294 |
+
}
|
| 295 |
+
)
|
| 296 |
+
if choice != ss.page:
|
| 297 |
+
ss.page = choice
|
| 298 |
+
st.rerun()
|
| 299 |
+
|
| 300 |
+
st.markdown("---")
|
| 301 |
+
if st.button("Reset Session (keep secrets)"):
|
| 302 |
+
for k in list(ss.keys()):
|
| 303 |
+
if k not in ["page", "llm_model"]:
|
| 304 |
+
del ss[k]
|
| 305 |
+
# reinitialize
|
| 306 |
+
ss.sites = [new_empty_site("Home")]
|
| 307 |
+
ss.active_site_idx = 0
|
| 308 |
+
st.success("Session reset.")
|
| 309 |
+
st.rerun()
|
| 310 |
+
|
| 311 |
+
st.markdown(f"<div style='color:{THEME['muted']};font-size:12px;padding-top:6px'>GeoMate V2 — Streamlit • Multi-site • RAG + Groq</div>", unsafe_allow_html=True)
|
| 312 |
+
|
| 313 |
+
# --- Landing page UI ---
|
| 314 |
def landing_ui():
|
| 315 |
+
st.markdown(
|
| 316 |
+
f"""
|
| 317 |
+
<style>
|
| 318 |
+
.hero {{
|
| 319 |
+
background: linear-gradient(180deg, rgba(255,122,0,0.06), rgba(255,122,0,0.02));
|
| 320 |
+
border-radius: 12px;
|
| 321 |
+
padding: 18px;
|
| 322 |
+
border: 1px solid rgba(255,122,0,0.08);
|
| 323 |
+
}}
|
| 324 |
+
.globe {{
|
| 325 |
+
width:120px;height:120px;border-radius:999px;
|
| 326 |
+
background: conic-gradient({THEME['accent']}, {THEME['accent2']}, {THEME['blue']});
|
| 327 |
+
box-shadow: 0 10px 40px rgba(0,0,0,0.6);
|
| 328 |
+
display:inline-block;margin-right:18px;
|
| 329 |
+
}}
|
| 330 |
+
.cta {{
|
| 331 |
+
background: linear-gradient(90deg, {THEME['accent']}, {THEME['accent2']});
|
| 332 |
+
color: white;padding:10px 18px;border-radius:10px;border: none;
|
| 333 |
+
}}
|
| 334 |
+
</style>
|
| 335 |
+
"""
|
| 336 |
+
, unsafe_allow_html=True)
|
| 337 |
+
|
| 338 |
+
col1, col2 = st.columns([2,1])
|
| 339 |
+
with col1:
|
| 340 |
+
st.markdown("<div class='hero'>", unsafe_allow_html=True)
|
| 341 |
+
st.markdown("<div style='display:flex;align-items:center'>")
|
| 342 |
+
st.markdown("<div class='globe'></div>", unsafe_allow_html=True)
|
| 343 |
+
st.markdown("<div><h1 style='margin:0;color:#FF8C00'>GeoMate V2</h1><div style='color:#9aa7bf'>AI copilot for geotechnical engineering</div></div>", unsafe_allow_html=True)
|
| 344 |
+
st.markdown("</div>")
|
| 345 |
+
st.markdown("<hr/>", unsafe_allow_html=True)
|
| 346 |
+
st.markdown("""
|
| 347 |
+
<ul>
|
| 348 |
+
<li><b>Soil Recognizer:</b> Image-based soil detection (upload photos or use OCR).</li>
|
| 349 |
+
<li><b>Classifier:</b> Verbatim USCS & AASHTO logic (chatbot style).</li>
|
| 350 |
+
<li><b>Locator:</b> Draw AOI on map, fetch soil/flood/seismic/topography via Earth Engine.</li>
|
| 351 |
+
<li><b>GeoMate Ask:</b> RAG-enabled LLM (FAISS + Groq) with session memory per site.</li>
|
| 352 |
+
<li><b>Reports:</b> Classification-only & full Geotechnical PDF reports (styled).</li>
|
| 353 |
+
</ul>
|
| 354 |
+
""", unsafe_allow_html=True)
|
| 355 |
+
st.markdown("</div>", unsafe_allow_html=True)
|
| 356 |
+
|
| 357 |
+
st.markdown("### Quick actions")
|
| 358 |
+
c1, c2, c3 = st.columns(3)
|
| 359 |
+
if c1.button("🧪 Classifier"):
|
| 360 |
+
ss.page = "Soil Classifier"
|
| 361 |
+
st.rerun()
|
| 362 |
+
if c2.button("📊 GSD Curve"):
|
| 363 |
+
ss.page = "GSD Curve"
|
| 364 |
+
st.rerun()
|
| 365 |
+
if c3.button("🌍 Locator"):
|
| 366 |
+
ss.page = "Locator"
|
| 367 |
+
st.rerun()
|
| 368 |
+
|
| 369 |
+
with col2:
|
| 370 |
+
st.markdown("<div style='padding:12px;border-radius:10px;background:#06121a'>", unsafe_allow_html=True)
|
| 371 |
+
active = get_active_site()
|
| 372 |
+
st.markdown(f"<div style='font-size:16px;color:{THEME['accent']}'><b>Active site</b></div>", unsafe_allow_html=True)
|
| 373 |
+
st.markdown(f"<div style='font-size:14px'>{active.get('Site Name','-')}</div>", unsafe_allow_html=True)
|
| 374 |
+
st.markdown("<hr/>", unsafe_allow_html=True)
|
| 375 |
+
st.markdown(f"<div style='color:{THEME['muted']};font-size:13px'>Sites configured: <b>{len(ss.sites)}</b></div>", unsafe_allow_html=True)
|
| 376 |
+
st.markdown(f"<div style='color:{THEME['muted']};font-size:13px'>Saved classifications: <b>{len([s for s in ss.sites if s.get('USCS') or s.get('AASHTO')])}</b></div>", unsafe_allow_html=True)
|
| 377 |
+
st.markdown("</div>", unsafe_allow_html=True)
|
| 378 |
+
|
| 379 |
st.markdown("---")
|
| 380 |
+
st.info("Tip: Use the sidebar to switch pages or the quick buttons above. All data is stored in this session (up to 4 sites).")
|
|
|
|
|
|
|
|
|
|
| 381 |
|
| 382 |
+
# -----------------------------
|
| 383 |
+
# Page function stubs (detailed implementations in Parts 2-4)
|
| 384 |
+
# These are declared so the script runs as a whole when all parts are concatenated.
|
| 385 |
+
# -----------------------------
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 386 |
|
| 387 |
+
def soil_recognizer_ui():
|
| 388 |
+
"""Image-based soil recognizer — placeholder here, implemented in Part 2."""
|
| 389 |
+
st.header("Soil Recognizer")
|
| 390 |
+
st.info("Upload an image of soil. OCR and image model will extract features and suggest soil type. (Implemented in Part 2/3)")
|
|
|
|
| 391 |
|
| 392 |
+
def soil_classifier_ui():
|
| 393 |
+
"""Soil classifier (chat-style). Full implementation continues in Part 2."""
|
| 394 |
+
st.header("Soil Classifier")
|
| 395 |
+
st.info("Chat-style classifier will be displayed here. (Detailed implementation in Part 2)")
|
| 396 |
|
| 397 |
+
def gsd_curve_ui():
|
| 398 |
+
"""GSD Curve page (upload data or enter diameters/passing). Implemented in Part 2."""
|
| 399 |
+
st.header("GSD Curve")
|
| 400 |
+
st.info("Plot GSD curves, compute D10/D30/D60, Cu, Cc. (Detailed implementation in Part 2)")
|
| 401 |
|
| 402 |
+
def locator_ui():
|
| 403 |
+
"""Locator page — interactive map and Earth Engine integration. Implemented in Part 3."""
|
| 404 |
+
st.header("Locator")
|
| 405 |
+
st.info("Draw AOI, fetch soil, flood, seismic and topography data. (Implemented in Part 3)")
|
| 406 |
|
| 407 |
+
def rag_ui():
|
| 408 |
+
"""GeoMate Ask — RAG Chatbot. Implemented in Part 4."""
|
| 409 |
+
st.header("GeoMate Ask (RAG + Groq)")
|
| 410 |
+
st.info("RAG-based technical chatbot with memory per site. (Implemented in Part 4)")
|
|
|
|
|
|
|
|
|
|
| 411 |
|
| 412 |
+
def reports_ui():
|
| 413 |
+
"""Reports UI: classification-only and full geotechnical report generator. Implemented in Part 4."""
|
| 414 |
+
st.header("Reports")
|
| 415 |
+
st.info("Generate Classification-only or Full Geotechnical PDF reports. (Implemented in Part 4)")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 416 |
|
| 417 |
+
# -----------------------------
|
| 418 |
+
# Main app runner (will route to pages)
|
| 419 |
+
# -----------------------------
|
| 420 |
+
def main():
|
| 421 |
+
sidebar_ui()
|
| 422 |
+
page = ss.page if hasattr(ss, "page") else "Landing"
|
| 423 |
+
# Page routing
|
| 424 |
+
if page == "Landing":
|
| 425 |
+
landing_ui()
|
| 426 |
+
elif page == "Soil Recognizer":
|
| 427 |
+
soil_recognizer_ui()
|
| 428 |
+
elif page == "Soil Classifier":
|
| 429 |
+
soil_classifier_ui()
|
| 430 |
+
elif page == "GSD Curve":
|
| 431 |
+
gsd_curve_ui()
|
| 432 |
+
elif page == "Locator":
|
| 433 |
+
locator_ui()
|
| 434 |
+
elif page == "GeoMate Ask":
|
| 435 |
+
rag_ui()
|
| 436 |
+
elif page == "Reports":
|
| 437 |
+
reports_ui()
|
| 438 |
+
else:
|
| 439 |
+
st.warning("Unknown page. Returning to Landing.")
|
| 440 |
+
ss.page = "Landing"
|
| 441 |
+
landing_ui()
|
| 442 |
|
| 443 |
+
# Run main
|
| 444 |
+
if __name__ == "__main__":
|
| 445 |
+
main()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 446 |
|
| 447 |
+
# End of Part 1/4
|
| 448 |
+
# Part 2/4 of GeoMate V2 app.py
|
| 449 |
+
# -------------------------------------------------------
|
| 450 |
+
# Implements:
|
| 451 |
+
# - Soil Recognizer (OCR + image-based placeholder)
|
| 452 |
+
# - Soil Classifier (chat-style Q&A, USCS + AASHTO logic)
|
| 453 |
+
# - GSD Curve Page (CSV upload + plotting + parameter calc)
|
| 454 |
+
# -------------------------------------------------------
|
| 455 |
+
|
| 456 |
+
import numpy as np
|
| 457 |
+
import pandas as pd
|
| 458 |
+
from PIL import Image
|
| 459 |
+
|
| 460 |
+
# -------------------------------------------------------
|
| 461 |
+
# Soil Recognizer
|
| 462 |
+
# -------------------------------------------------------
|
| 463 |
+
def soil_recognizer_ui():
|
| 464 |
+
st.header("🖼️ Soil Recognizer (Image / OCR)")
|
| 465 |
+
site = get_active_site()
|
| 466 |
|
| 467 |
+
col1, col2 = st.columns(2)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 468 |
|
| 469 |
+
with col1:
|
| 470 |
+
uploaded = st.file_uploader("Upload soil image", type=["jpg","jpeg","png"])
|
| 471 |
+
if uploaded:
|
| 472 |
+
img = Image.open(uploaded)
|
| 473 |
+
st.image(img, caption="Uploaded soil image", use_column_width=True)
|
| 474 |
+
# TODO: integrate your trained soil recognition model
|
| 475 |
+
st.success("Soil recognizer placeholder: model inference to be integrated.")
|
| 476 |
+
|
| 477 |
+
with col2:
|
| 478 |
+
st.subheader("📑 OCR Extraction")
|
| 479 |
+
if easyocr is None:
|
| 480 |
+
st.warning("easyocr not installed. Add `easyocr` to requirements.txt.")
|
| 481 |
+
else:
|
| 482 |
+
ocr_file = st.file_uploader("Upload photo of question/text (OCR)", type=["jpg","jpeg","png"], key="ocr_input")
|
| 483 |
+
if ocr_file:
|
| 484 |
+
reader = easyocr.Reader(['en'])
|
| 485 |
+
results = reader.readtext(np.array(Image.open(ocr_file)))
|
| 486 |
+
extracted_text = " ".join([r[1] for r in results])
|
| 487 |
+
st.text_area("Extracted text", extracted_text, height=150)
|
| 488 |
+
# TODO: parse extracted numbers for classification if possible
|
| 489 |
+
site["ocr_pending"] = True
|
| 490 |
+
save_active_site(site)
|
| 491 |
+
st.success("OCR text extracted. Parsed values will be linked to classifier soon.")
|
| 492 |
+
|
| 493 |
+
# -------------------------------------------------------
|
| 494 |
+
# Soil Classifier
|
| 495 |
+
# -------------------------------------------------------
|
| 496 |
+
|
| 497 |
+
# Helpers for USCS + AASHTO classification logic
|
| 498 |
+
def classify_uscs(inputs: Dict[str, Any]) -> Tuple[str, str]:
|
| 499 |
+
"""
|
| 500 |
+
Verbatim simplified USCS logic based on % fines, D10/30/60, LL, PL, and observations.
|
| 501 |
+
Returns (code, description).
|
| 502 |
+
"""
|
| 503 |
+
# Very simplified placeholder (expand with full decision tree)
|
| 504 |
+
fines = inputs.get("P200", 0.0)
|
| 505 |
+
if inputs.get("organic", False):
|
| 506 |
+
return "Pt", "Peat / Organic soil — compressible, poor engineering properties."
|
| 507 |
+
if fines < 5:
|
| 508 |
+
return "GW", "Well-graded gravel with excellent load-bearing capacity."
|
| 509 |
+
if fines > 50:
|
| 510 |
+
LL = inputs.get("LL", 0)
|
| 511 |
+
if LL < 50:
|
| 512 |
+
return "CL", "Low plasticity clay."
|
| 513 |
+
else:
|
| 514 |
+
return "CH", "High plasticity clay."
|
| 515 |
+
return "SM", "Silty sand with moderate engineering quality."
|
| 516 |
|
| 517 |
+
def classify_aashto(inputs: Dict[str, Any]) -> Tuple[str, str]:
|
| 518 |
+
"""
|
| 519 |
+
Simplified AASHTO classification logic.
|
| 520 |
+
"""
|
| 521 |
+
fines = inputs.get("P200", 0.0)
|
| 522 |
+
if inputs.get("organic", False):
|
| 523 |
+
return "A-8", "Organic soils (special handling required)."
|
| 524 |
+
if fines < 35:
|
| 525 |
+
return "A-1-a", "Granular material with excellent performance."
|
| 526 |
+
else:
|
| 527 |
+
return "A-7-6", "Clayey soils with poor performance unless stabilized."
|
| 528 |
|
| 529 |
+
def soil_classifier_ui():
    """Chatbot-style page that walks the user through USCS + AASHTO classification.

    Conversation progress is persisted on the active site dict:
      classifier_state  -- index of the question being answered (-1 = finished)
      classifier_inputs -- collected numeric/boolean answers
      classifier_chat   -- list of [role, message] transcript entries
    """
    st.header("🤖 Soil Classifier (Chatbot Style)")
    site = get_active_site()

    if "classifier_state" not in site:
        site["classifier_state"] = 0
        site["classifier_inputs"] = {}
        site["classifier_chat"] = []

    chat = site["classifier_chat"]

    def add_bot(msg: str):
        chat.append(["bot", msg])

    def add_user(msg: str):
        chat.append(["user", msg])

    def parse_float(raw: str) -> float:
        # fix: the original used bare `except:` clauses (swallowing even
        # SystemExit/KeyboardInterrupt); only conversion failures should
        # fall back to 0.0.
        try:
            return float(raw)
        except (TypeError, ValueError):
            return 0.0

    # Chat rendering
    for role, msg in chat:
        bubble_color = THEME["bubble_bg"] if role == "bot" else "#1f2a44"
        border = f"2px solid {THEME['accent']}" if role == "bot" else "1px solid #333"
        st.markdown(f"""
        <div style='margin:6px 0;padding:8px 12px;background:{bubble_color};
        border-radius:14px;border:{border};max-width:80%;'>
        <b>{'🤖' if role=='bot' else '👤'}:</b> {msg}
        </div>
        """, unsafe_allow_html=True)

    # State machine
    state = site["classifier_state"]
    inputs = site["classifier_inputs"]

    def ask(question: str):
        # Post the next question, advance the state, and rerun the script
        # (st.rerun() aborts the current run, so nothing after ask() executes).
        add_bot(question)
        site["classifier_state"] += 1
        save_active_site(site)
        st.rerun()

    def finish():
        # Run both classifiers on the collected inputs and end the conversation.
        site["USCS"], desc1 = classify_uscs(inputs)
        site["AASHTO"], desc2 = classify_aashto(inputs)
        add_bot(f"Classification complete: USCS={site['USCS']} ({desc1}), AASHTO={site['AASHTO']} ({desc2})")
        site["classifier_state"] = -1

    # Initial Q
    if state == 0 and not chat:
        ask("Hello — I am the GeoMate Soil Classifier. Is the soil organic (spongy, dark, odorous)? (y/n)")

    # User input
    user_in = st.text_input("Your answer:", key=f"classifier_input_{state}")
    if st.button("➡️ Submit", key=f"classifier_submit_{state}"):
        if user_in.strip():
            add_user(user_in.strip())
            # Logic branch — states 1..9 mirror the question order.
            if state == 1:  # organic q
                if user_in.lower().startswith("y"):
                    inputs["organic"] = True
                    finish()
                else:
                    inputs["organic"] = False
                    ask("What is the % passing the #200 sieve (0.075 mm)?")
            elif state == 2:
                inputs["P200"] = parse_float(user_in)
                if inputs["P200"] < 5:
                    ask("What is the % passing the sieve no. 4 (4.75 mm)?")
                else:
                    # fix: jump straight to the LL/PL questions (states 8-9).
                    # Previously the LL answer was misrouted into state 3 (P4).
                    site["classifier_state"] = 7
                    ask("What is the Liquid Limit (LL)?")
            elif state == 3:
                inputs["P4"] = parse_float(user_in)
                ask("Do you know the D10, D30, D60 values? (y/n)")
            elif state == 4:
                if user_in.lower().startswith("y"):
                    ask("Enter D60 (mm):")
                else:
                    # fix: skip the D-value states so the LL answer is not
                    # stored as D60 (states 5-7 collect D60/D30/D10).
                    site["classifier_state"] = 7
                    ask("Enter Liquid Limit (LL):")
            elif state == 5:
                inputs["D60"] = parse_float(user_in)
                ask("Enter D30 (mm):")
            elif state == 6:
                inputs["D30"] = parse_float(user_in)
                ask("Enter D10 (mm):")
            elif state == 7:
                inputs["D10"] = parse_float(user_in)
                ask("Enter Liquid Limit (LL):")
            elif state == 8:
                inputs["LL"] = parse_float(user_in)
                ask("Enter Plastic Limit (PL):")
            elif state == 9:
                inputs["PL"] = parse_float(user_in)
                # classify now
                finish()
            # Only the branches that did not call ask() (which reruns) reach
            # this point, i.e. the two finish() paths.
            save_active_site(site)
            st.rerun()

    if site["classifier_state"] == -1:
        if st.button("📄 Export Classification Report"):
            st.success("Report export will be in Reports page.")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 634 |
|
| 635 |
+
# -------------------------------------------------------
|
| 636 |
+
# GSD Curve Page
|
| 637 |
+
# -------------------------------------------------------
|
| 638 |
def gsd_curve_ui():
    """GSD page: plot %passing vs sieve size, derive D10/D30/D60, Cu and Cc.

    Results are stored in site["GSD"] and the plot is saved to
    /tmp/geomate_gsd_plot.png so the PDF report builders can embed it.
    """
    st.header("📊 Grain Size Distribution (GSD) Curve")
    site = get_active_site()

    st.info("Upload sieve analysis data (CSV: Sieve size [mm], %Passing). Or manually enter D-values.")

    uploaded = st.file_uploader("Upload CSV", type=["csv"], key="gsd_csv")
    data = None
    if uploaded:
        df = pd.read_csv(uploaded)
        st.write(df)
        try:
            sizes = df.iloc[:, 0].values
            passing = df.iloc[:, 1].values
            data = (sizes, passing)
        except Exception as e:
            st.error(f"Error parsing CSV: {e}")

    if data is not None:
        sizes, passing = data
        # Plot
        fig, ax = plt.subplots()
        ax.semilogx(sizes, passing, marker="o", color="orange")
        ax.set_xlabel("Sieve Size (mm, log scale)")
        ax.set_ylabel("% Passing")
        ax.set_title("Grain Size Distribution Curve")
        ax.grid(True, which="both", linestyle="--", linewidth=0.5)
        st.pyplot(fig)
        # fix: persist the figure so the PDF builders (which look for
        # /tmp/geomate_gsd_plot.png) can actually embed it; best-effort only.
        try:
            fig.savefig("/tmp/geomate_gsd_plot.png", dpi=150, bbox_inches="tight")
        except Exception:
            pass

        # Interpolate D10, D30, D60.
        # NOTE(review): np.interp requires ascending x values; this assumes the
        # CSV lists sizes in descending order so passing[::-1] ascends —
        # confirm against the expected input format.
        def interpD(target):
            return np.interp(target, passing[::-1], sizes[::-1])

        D10 = interpD(10)
        D30 = interpD(30)
        D60 = interpD(60)
        Cu = D60 / D10 if D10 > 0 else None
        Cc = (D30 ** 2) / (D60 * D10) if D10 > 0 and D60 > 0 else None

        st.write(f"D10={D10:.3f} mm, D30={D30:.3f} mm, D60={D60:.3f} mm")
        # fix: the original formatted Cu/Cc with :.2f unconditionally, which
        # raises TypeError when either is None (D10 or D60 == 0).
        cu_txt = f"{Cu:.2f}" if Cu is not None else "N/A"
        cc_txt = f"{Cc:.2f}" if Cc is not None else "N/A"
        st.write(f"Cu={cu_txt}, Cc={cc_txt}")

        site["GSD"] = {"D10": D10, "D30": D30, "D60": D60, "Cu": Cu, "Cc": Cc}
        save_active_site(site)

    with st.expander("Manual entry"):
        c1, c2, c3 = st.columns(3)
        D10 = c1.number_input("D10 (mm)", value=0.0)
        D30 = c2.number_input("D30 (mm)", value=0.0)
        D60 = c3.number_input("D60 (mm)", value=0.0)
        if st.button("Save D-values"):
            site["GSD"] = {"D10": D10, "D30": D30, "D60": D60}
            save_active_site(site)
            st.success("Saved to site.")
|
| 691 |
+
|
| 692 |
+
# End of Part 2/4
|
| 693 |
+
# Part 3/4 of GeoMate V2 app.py
|
| 694 |
+
# -------------------------------------------------------
|
| 695 |
+
# Implements:
|
| 696 |
+
# - Locator Page with EE + geemap
|
| 697 |
+
# - Extracts flood, seismic, soil, topography data
|
| 698 |
+
# -------------------------------------------------------
|
| 699 |
+
|
| 700 |
+
import geemap.foliumap as geemap
|
| 701 |
+
import ee
|
| 702 |
+
|
| 703 |
+
# --- Earth Engine bootstrap ---------------------------------------------
# Initialize EE with a service-account key taken from Streamlit secrets.
# EE_READY gates the Locator page; it stays False on any failure.
EE_READY = False
try:
    if "EARTH_ENGINE_KEY" not in st.secrets:
        st.warning("EARTH_ENGINE_KEY not found in secrets → Locator limited.")
    else:
        import json
        raw_key = st.secrets["EARTH_ENGINE_KEY"]
        # The secret may be stored as a JSON string; parse it into a dict.
        key_data = json.loads(raw_key) if isinstance(raw_key, str) else raw_key
        sa_credentials = ee.ServiceAccountCredentials(st.secrets["SERVICE_ACCOUNT"], key_data)
        ee.Initialize(sa_credentials)
        EE_READY = True
except Exception as e:
    st.error(f"EE init failed: {e}")
    EE_READY = False
|
| 719 |
|
| 720 |
+
|
| 721 |
+
# -------------------------------------------------------
|
| 722 |
+
# Helper EE datasets
|
| 723 |
+
# -------------------------------------------------------
|
| 724 |
+
def fetch_flood_data(aoi):
    """Mean JRC monthly surface-water value over *aoi* at 30 m scale.

    Returns the reduceRegion result dict, or {"error": ...} on failure.
    """
    try:
        water_img = ee.ImageCollection("JRC/GSW1_4/MonthlyHistory").select("water").mean()
        reduced = water_img.reduceRegion(reducer=ee.Reducer.mean(), geometry=aoi, scale=30, maxPixels=1e9)
        return reduced.getInfo()
    except Exception as exc:
        return {"error": str(exc)}
|
| 731 |
+
|
| 732 |
+
def fetch_seismic_data(aoi):
    """Mean USGS 2013 PGA (10% in 50 yr) hazard value over *aoi* at 1 km scale.

    Returns the reduceRegion result dict, or {"error": ...} on failure.
    """
    try:
        pga_img = ee.Image("USGS/GME/hazards/seismic/2013_PGA_10pct_50yr")
        reduced = pga_img.reduceRegion(reducer=ee.Reducer.mean(), geometry=aoi, scale=1000, maxPixels=1e9)
        return reduced.getInfo()
    except Exception as exc:
        return {"error": str(exc)}
|
| 739 |
+
|
| 740 |
+
def fetch_topography_data(aoi):
    """Mean SRTM elevation over *aoi* at 90 m scale.

    Returns the reduceRegion result dict, or {"error": ...} on failure.
    """
    try:
        dem_img = ee.Image("USGS/SRTMGL1_003")
        reduced = dem_img.reduceRegion(reducer=ee.Reducer.mean(), geometry=aoi, scale=90, maxPixels=1e9)
        return reduced.getInfo()
    except Exception as exc:
        return {"error": str(exc)}
|
| 747 |
+
|
| 748 |
+
def fetch_soil_data(aoi):
    """Modal USDA soil-texture class over *aoi* at 250 m scale.

    Uses Reducer.mode() since texture class is categorical.
    Returns the reduceRegion result dict, or {"error": ...} on failure.
    """
    try:
        texture_img = ee.Image("OpenLandMap/SOL/SOL_TEXTURE-CLASS_USDA-TT_M/v02")
        reduced = texture_img.reduceRegion(reducer=ee.Reducer.mode(), geometry=aoi, scale=250, maxPixels=1e9)
        return reduced.getInfo()
    except Exception as exc:
        return {"error": str(exc)}
|
| 755 |
|
| 756 |
+
# -------------------------------------------------------
|
| 757 |
+
# Locator Page
|
| 758 |
+
# -------------------------------------------------------
|
| 759 |
+
def locator_ui():
    """Locator page: draw an AOI on an interactive map, then pull flood,
    seismic, topography and soil data from Earth Engine into the active site.
    """
    st.header("🌍 Locator (Earth Engine Powered)")
    site = get_active_site()

    if not EE_READY:
        st.warning("Earth Engine not ready. Map disabled.")
        return

    # Create map
    m = geemap.Map(center=[20, 78], zoom=4, plugin_Draw=True, Draw_export=True, locate_control=True)
    m.add_basemap("HYBRID")

    with st.expander("📌 Instructions"):
        st.markdown("""
        - Pan/zoom to your site
        - Use the draw tool (rectangle/circle/polygon) to mark AOI
        - Or click the crosshair to auto-locate
        - Data will be extracted from Earth Engine
        """)

    # Show map in Streamlit
    m.to_streamlit(height=500)

    # fix: call user_roi_bounds() once and reuse — the original invoked it
    # twice (once for the truthiness test, once for the value).
    coords = m.user_roi_bounds()
    if coords:
        site["Coordinates"] = coords
        poly = ee.Geometry.Polygon(coords)

        # Extract data for the drawn AOI.
        flood = fetch_flood_data(poly)
        seismic = fetch_seismic_data(poly)
        topo = fetch_topography_data(poly)
        soil = fetch_soil_data(poly)

        site["Flood Data"] = flood
        site["Seismic Data"] = seismic
        site["Topography"] = topo
        site["Soil Profile"] = soil

        save_active_site(site)

        st.success("✅ Data extracted and saved to site.")
        st.json({
            "Flood": flood,
            "Seismic": seismic,
            "Topography": topo,
            "Soil": soil
        })

        # Take snapshot for report (best-effort; HTML export can fail).
        try:
            map_html = m.to_html()
            site["map_snapshot"] = map_html
            save_active_site(site)
        except Exception as e:
            st.error(f"Map snapshot error: {e}")
|
| 815 |
+
# Part 4/4 of GeoMate V2 app.py
|
| 816 |
+
# -------------------------------------------------------
|
| 817 |
+
# Implements:
|
| 818 |
+
# - RAG: FAISS + Groq chat (per-site memory)
|
| 819 |
+
# - Entity extraction placeholder to auto-save parameters from chat
|
| 820 |
+
# - Reports: Classification-only PDF + Full Geotechnical Report PDF
|
| 821 |
+
# - Final UI glue: Reports page and main app routing
|
| 822 |
+
# -------------------------------------------------------
|
| 823 |
+
|
| 824 |
+
import os
|
| 825 |
+
import io
|
| 826 |
+
import zipfile
|
| 827 |
+
import json
|
| 828 |
+
import tempfile
|
| 829 |
+
import base64
|
| 830 |
+
import pickle
|
| 831 |
+
from datetime import datetime
|
| 832 |
+
import textwrap
|
| 833 |
+
|
| 834 |
+
# LLM client (Groq)
|
| 835 |
+
try:
|
| 836 |
+
from groq import Groq
|
| 837 |
+
except Exception:
|
| 838 |
+
Groq = None
|
| 839 |
+
|
| 840 |
+
# FAISS
|
| 841 |
+
try:
|
| 842 |
+
import faiss
|
| 843 |
+
except Exception:
|
| 844 |
+
faiss = None
|
| 845 |
+
|
| 846 |
+
# PDF (ReportLab)
|
| 847 |
+
from reportlab.lib import colors
|
| 848 |
+
from reportlab.lib.pagesizes import A4, landscape
|
| 849 |
+
from reportlab.lib.units import mm
|
| 850 |
+
from reportlab.platypus import (
|
| 851 |
+
SimpleDocTemplate, Paragraph, Spacer, Table, TableStyle, Image as RLImage, PageBreak
|
| 852 |
+
)
|
| 853 |
+
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
|
| 854 |
+
|
| 855 |
+
# Matplotlib for embedding GSD plot images into PDFs
|
| 856 |
+
import matplotlib.pyplot as plt
|
| 857 |
+
|
| 858 |
+
# --------------------------
# Basic secret checks (run early)
# --------------------------
# Prefer environment variables; fall back to Streamlit/HF Spaces secrets.
def _secret(name):
    return os.getenv(name) or (st.secrets.get(name) if name in st.secrets else None)

GROQ_KEY = _secret("GROQ_API_KEY")
SERVICE_ACCOUNT = _secret("SERVICE_ACCOUNT")
EARTH_ENGINE_KEY = _secret("EARTH_ENGINE_KEY")

# GROQ_API_KEY and SERVICE_ACCOUNT are hard requirements; EARTH_ENGINE_KEY
# only degrades the Locator page, so it gets a warning instead.
missing = [name for name, value in
           (("GROQ_API_KEY", GROQ_KEY), ("SERVICE_ACCOUNT", SERVICE_ACCOUNT))
           if not value]
if not EARTH_ENGINE_KEY:
    st.warning("EARTH_ENGINE_KEY not found in secrets. Locator functions will be limited or disabled.")

if missing:
    st.error(f"Missing required secrets: {', '.join(missing)}. Please add them to Hugging Face Secrets and reload the app.")
    st.stop()

# Initialize Groq client (None disables RAG features downstream).
if Groq is None:
    st.warning("Groq client lib not installed (pip package 'groq'). RAG will be disabled.")
    GROQ_CLIENT = None
else:
    try:
        GROQ_CLIENT = Groq(api_key=GROQ_KEY)
    except Exception as e:
        st.error(f"Failed to create Groq client: {e}")
        GROQ_CLIENT = None
|
| 889 |
+
|
| 890 |
+
# --------------------------
# Session-state helpers (ensure present)
# --------------------------
# Seed a default "Home" site and an active-site pointer on first run.
if "sites" not in ss:
    ss["sites"] = [{
        "Site Name": "Home",
        "chat_history": [],
        "classifier_inputs": {},
        "classifier_state": 0,
        "GSD": None,
        "USCS": None,
        "AASHTO": None,
        "GI": None,
        "map_snapshot": None,
        "report_convo_state": 0,
        "soil_profile": None,
    }]

if "active_site_idx" not in ss:
    ss["active_site_idx"] = 0
|
| 900 |
+
|
| 901 |
+
def get_active_site():
    """Return the currently selected site dict, resetting a stale index to 0."""
    all_sites = ss.get("sites", [])
    current = ss.get("active_site_idx", 0)
    if not (0 <= current < len(all_sites)):
        ss["active_site_idx"] = 0
        current = 0
    return all_sites[current]
|
| 908 |
+
|
| 909 |
+
def save_active_site(site):
    """Write *site* back into the sites list at the active index."""
    ss["sites"][ss.get("active_site_idx", 0)] = site
    # Flag the session state as changed so the update persists across reruns.
    ss.modified = True
|
| 914 |
+
|
| 915 |
+
def add_site(name="New Site"):
    """Append a blank site (capped at 4) and make it the active one."""
    if len(ss["sites"]) >= 4:
        st.warning("Maximum 4 sites allowed.")
        return
    blank_site = {
        "Site Name": name,
        "chat_history": [],
        "classifier_inputs": {},
        "classifier_state": 0,
        "GSD": None,
        "USCS": None,
        "AASHTO": None,
        "GI": None,
        "map_snapshot": None,
        "report_convo_state": 0,
        "soil_profile": None,
    }
    ss["sites"].append(blank_site)
    ss["active_site_idx"] = len(ss["sites"]) - 1
|
| 923 |
+
|
| 924 |
+
def remove_site(idx):
    """Delete the site at *idx*, keeping active_site_idx pointing at a valid site."""
    if 0 <= idx < len(ss["sites"]):
        ss["sites"].pop(idx)
        # fix: the original always decremented the active index, even when a
        # site *after* the active one was removed (which silently switched
        # the user to a different site). Only shift when the removal affects
        # the active position, then clamp into the remaining range.
        if idx <= ss["active_site_idx"]:
            ss["active_site_idx"] = max(0, ss["active_site_idx"] - 1)
        ss["active_site_idx"] = min(ss["active_site_idx"], max(0, len(ss["sites"]) - 1))
|
| 928 |
+
|
| 929 |
+
# --------------------------
|
| 930 |
+
# FAISS DB loader (expects a zip containing index.faiss + meta.pkl)
|
| 931 |
+
# --------------------------
|
| 932 |
+
def load_faiss_db_from_zip(zip_path: str):
    """
    Accepts path to a zip file containing index.faiss and meta.pkl (or index.pkl).
    Extracts to a temp dir, loads the faiss index + metadata list, and cleans up.
    Returns (index, meta_list) or (None, None) on error.
    """
    import shutil  # local import: only needed for temp-dir cleanup

    if faiss is None:
        st.error("faiss not installed. RAG unavailable.")
        return None, None

    if not os.path.exists(zip_path):
        st.error("FAISS DB zip not found at provided path.")
        return None, None
    tmpd = tempfile.mkdtemp()
    try:
        with zipfile.ZipFile(zip_path, "r") as z:
            z.extractall(tmpd)
        # fix: search recursively — zips commonly nest their files inside a
        # top-level folder, which the original flat os.listdir() missed.
        idx_file = None
        meta_file = None
        for root, _dirs, files in os.walk(tmpd):
            for fname in files:
                if idx_file is None and fname.endswith(".faiss"):
                    idx_file = os.path.join(root, fname)
                if meta_file is None and fname.endswith(".pkl"):
                    meta_file = os.path.join(root, fname)
        if idx_file is None or meta_file is None:
            st.error("Zip did not contain index.faiss and meta.pkl.")
            return None, None
        index = faiss.read_index(idx_file)
        # NOTE(review): pickle.load on an uploaded archive executes arbitrary
        # code for a malicious file — acceptable only for trusted uploads.
        with open(meta_file, "rb") as f:
            meta = pickle.load(f)
        return index, meta
    except Exception as e:
        st.error(f"Failed to load FAISS DB: {e}")
        return None, None
    finally:
        # fix: the original leaked the extraction directory on every call.
        shutil.rmtree(tmpd, ignore_errors=True)
|
| 967 |
+
|
| 968 |
+
# --------------------------
|
| 969 |
+
# RAG helper: simple retriever + Groq caller
|
| 970 |
+
# --------------------------
|
| 971 |
+
def rag_retrieve_and_answer(query: str, topk: int = 5):
    """
    Retrieve context from the loaded FAISS metadata and answer via Groq.

    Returns the model's answer string, or an explanatory message when the
    FAISS DB or the Groq client is unavailable.
    """
    # Guard: a FAISS DB must have been uploaded earlier in the session.
    if "faiss_index" not in ss or ss.get("faiss_index") is None:
        st.error("FAISS index not loaded. Please upload faiss_books_db.zip in GeoMate Ask page.")
        return "FAISS DB missing."

    index = ss["faiss_index"]
    meta = ss["faiss_meta"]
    # Cheap retrieval fallback: no query embedding is computed here; the first
    # topk metadata texts are concatenated as context.
    try:
        candidates = meta[:topk] if isinstance(meta, list) else meta
        context = "\n\n".join(entry.get("text", "") for entry in candidates)
    except Exception:
        context = ""

    # Build prompt for Groq
    system = "You are GeoMate RAG assistant. Use the context to answer precisely and professionally."
    user_prompt = f"Context:\n{context}\n\nQuestion: {query}\nAnswer concisely and cite context sections if relevant."

    if GROQ_CLIENT is None:
        return "Groq client not available. Cannot complete RAG call."

    try:
        completion = GROQ_CLIENT.chat.completions.create(
            model="meta-llama/llama-4-maverick-17b-128e-instruct",
            messages=[
                {"role": "system", "content": system},
                {"role": "user", "content": user_prompt},
            ],
            temperature=0.2,
            max_tokens=800,
        )
        return completion.choices[0].message.content
    except Exception as e:
        st.error(f"Groq call failed: {e}")
        return "RAG call failed."
|
| 1013 |
+
|
| 1014 |
+
# --------------------------
|
| 1015 |
+
# Entity extraction placeholder
|
| 1016 |
+
# --------------------------
|
| 1017 |
+
def update_site_description_from_text(site: dict, text: str) -> dict:
    """
    Very simple regex-based extraction of core engineering parameters from
    free text, run after each RAG/chat answer. Placeholder for a real NER
    model; updates *site* in place and returns it.
    """
    import re

    # Load-bearing capacity (psf or kPa).
    bearing = re.search(r"bearing capacity\s*(?:of)?\s*([0-9,.]+)\s*(kpa|psf|pa|kn/m2)?", text, re.IGNORECASE)
    if bearing:
        amount = bearing.group(1).replace(",", "")
        unit = bearing.group(2) or ""
        site["Load Bearing Capacity"] = f"{amount} {unit}".strip()

    # Skin shear strength.
    shear = re.search(r"skin shear strength\s*(?:[:is]*)\s*([0-9,.]+)\s*(kpa|kn/m2|psf)?", text, re.IGNORECASE)
    if shear:
        site["Skin Shear Strength"] = f"{shear.group(1).replace(',', '')} {shear.group(2) or ''}".strip()

    # Percent compaction.
    compaction = re.search(r"compaction\s*(?:[:is]*)\s*([0-9]{1,3})\s*%", text, re.IGNORECASE)
    if compaction:
        site["Relative Compaction"] = f"{compaction.group(1)}%"

    # Rate of consolidation.
    consolidation = re.search(r"consolidation rate\s*(?:[:is]*)\s*([0-9,.]+)\s*(mm/year|mm/yr|mm per year|m/year)?", text, re.IGNORECASE)
    if consolidation:
        site["Rate of Consolidation"] = f"{consolidation.group(1)} {consolidation.group(2) or ''}".strip()

    # Nature of construction — keyword scan, first match wins.
    lowered = text.lower()
    if "residential" in lowered:
        site["Nature of Construction"] = "Residential"
    elif "commercial" in lowered:
        site["Nature of Construction"] = "Commercial"
    elif "pavement" in lowered or "road" in lowered:
        site["Nature of Construction"] = "Pavement / Road"

    return site
|
| 1055 |
+
|
| 1056 |
+
# --------------------------
|
| 1057 |
+
# GeoMate Ask (RAG Chat) UI
|
| 1058 |
+
# --------------------------
|
| 1059 |
def rag_ui():
    """GeoMate Ask page: FAISS-backed RAG chat via Groq with per-site history."""
    st.header("🤖 GeoMate Ask — RAG + Groq (per-site memory)")
    site = get_active_site()

    if "chat_history" not in site:
        site["chat_history"] = []

    st.markdown("**Context:** The RAG uses your FAISS knowledge base (upload .zip in this page) and Groq LLM for answers. Chat history is saved for this site during the session.")

    # FAISS DB upload (one-time)
    with st.expander("FAISS DB (index.faiss + meta.pkl inside a zip)"):
        uploaded = st.file_uploader("Upload faiss_books_db.zip", type=["zip"])
        if uploaded:
            tmpf = tempfile.NamedTemporaryFile(delete=False, suffix=".zip")
            tmpf.write(uploaded.getvalue())
            tmpf.flush()
            ix, meta = load_faiss_db_from_zip(tmpf.name)
            if ix is not None:
                ss["faiss_index"] = ix
                ss["faiss_meta"] = meta
                st.success("FAISS DB loaded.")

    # Render chat history
    for turn in site.get("chat_history", []):
        role = turn.get("role")
        text = turn.get("text")
        if role == "bot":
            st.markdown(f"<div style='background:{THEME['bubble_bg']};padding:8px;border-radius:12px;border:2px solid {THEME['accent']};'><b>🤖 GeoMate:</b> {text}</div>", unsafe_allow_html=True)
        else:
            st.markdown(f"<div style='background:#1a2436;color:#fff;padding:8px;border-radius:12px;margin-left:40px;'><b>👤 You:</b> {text}</div>", unsafe_allow_html=True)

    # Input box
    user_q = st.text_input("Ask GeoMate (RAG + site memory):", key="geomate_rag_input")
    if st.button("Ask", key="geomate_rag_button"):
        if not user_q.strip():
            st.warning("Type a question first.")
        else:
            # Append the question to history before calling the LLM.
            site["chat_history"].append({"role":"user","text":user_q, "time":datetime.utcnow().isoformat()})
            save_active_site(site)
            # Retrieve + call LLM
            with st.spinner("Retrieving context and calling LLM..."):
                answer = rag_retrieve_and_answer(user_q, topk=5)
            # Append bot answer
            site["chat_history"].append({"role":"bot","text":answer, "time":datetime.utcnow().isoformat()})
            # Try to extract any engineering parameters from answer or the user question
            site = update_site_description_from_text(site, user_q + "\n" + answer)
            save_active_site(site)
            # fix: st.experimental_rerun() was deprecated/removed in modern
            # Streamlit; use st.rerun() as the rest of this file already does.
            st.rerun()

    # Quick buttons
    colA, colB = st.columns(2)
    if colA.button("Save Chat to Site JSON"):
        save_active_site(site)
        st.success("Saved chat into site JSON.")
    if colB.button("Clear Site Chat"):
        site["chat_history"] = []
        save_active_site(site)
        st.success("Cleared history for this site.")
|
| 1118 |
+
|
| 1119 |
+
# --------------------------
|
| 1120 |
+
# REPORTS: PDF builders
|
| 1121 |
+
# --------------------------
|
| 1122 |
+
def build_classification_pdf_bytes(site: dict):
    """
    Build classification-only PDF (returns bytes)
    """
    # Render into an in-memory buffer; the caller hands the bytes to
    # st.download_button, so nothing touches disk.
    buf = io.BytesIO()
    doc = SimpleDocTemplate(buf, pagesize=A4, leftMargin=20*mm, rightMargin=20*mm, topMargin=20*mm, bottomMargin=20*mm)
    styles = getSampleStyleSheet()
    # Brand styles: accent color comes from the app-wide THEME dict.
    title = ParagraphStyle("title", parent=styles["Title"], fontSize=20, textColor=THEME["accent"], alignment=1)
    h1 = ParagraphStyle("h1", parent=styles["Heading1"], fontSize=14, textColor=THEME["accent"])
    body = ParagraphStyle("body", parent=styles["BodyText"], fontSize=10)

    elems = []
    elems.append(Paragraph("Classification Report — GeoMate V2", title))
    elems.append(Spacer(1,8))
    elems.append(Paragraph(f"Site: {site.get('Site Name','-')}", h1))
    elems.append(Spacer(1,6))

    # Inputs table: one row per answer collected by the classifier chatbot.
    inputs = site.get("classifier_inputs", {})
    data = [["Parameter","Value"]]
    for k,v in inputs.items():
        data.append([k, str(v)])
    t = Table(data, colWidths=[80*mm, 80*mm])
    t.setStyle(TableStyle([("GRID",(0,0),(-1,-1),0.5,colors.grey),
                           ("BACKGROUND",(0,0),(-1,0),THEME["accent"]),
                           ("TEXTCOLOR",(0,0),(-1,0),colors.white)]))
    elems.append(t)
    elems.append(Spacer(1,8))

    # Results
    elems.append(Paragraph("Results", h1))
    elems.append(Paragraph(f"USCS: {site.get('USCS','N/A')}", body))
    elems.append(Paragraph(f"AASHTO: {site.get('AASHTO','N/A')} (GI: {site.get('GI','N/A')})", body))
    elems.append(Spacer(1,8))

    # GSD curve inclusion if present
    gsd = site.get("GSD")
    if gsd:
        elems.append(Paragraph("GSD Parameters", h1))
        elems.append(Paragraph(f"D10: {gsd.get('D10')}, D30: {gsd.get('D30')}, D60: {gsd.get('D60')}", body))
        # If a plot image is stored in site, include (we store last plot as /tmp/gsd_plot.png)
        gsd_img_path = "/tmp/geomate_gsd_plot.png"
        if os.path.exists(gsd_img_path):
            elems.append(Spacer(1,6))
            elems.append(RLImage(gsd_img_path, width=150*mm, height=80*mm))
    # NOTE(review): the decision-path section is emitted unconditionally here;
    # the original's nesting relative to the `if gsd:` block is ambiguous in
    # the diff view — confirm intended placement.
    elems.append(Spacer(1,10))
    elems.append(Paragraph("Decision path", h1))
    elems.append(Paragraph(site.get("classifier_decision_path","Not recorded"), body))
    doc.build(elems)
    pdf = buf.getvalue()
    buf.close()
    return pdf
|
| 1174 |
+
|
| 1175 |
+
def build_full_geotech_pdf_bytes(sites_list: list, external_refs: list):
    """
    Build a full geotechnical report covering all selected sites.
    sites_list: list of site dictionaries
    external_refs: list of reference strings appended as a final section
    Returns bytes of PDF.
    """
    # Render entirely in memory; bytes are returned for st.download_button.
    buf = io.BytesIO()
    doc = SimpleDocTemplate(buf, pagesize=A4, leftMargin=20*mm, rightMargin=20*mm, topMargin=20*mm, bottomMargin=20*mm)
    styles = getSampleStyleSheet()
    # Brand styles derived from the app-wide THEME accent color.
    title = ParagraphStyle("title", parent=styles["Title"], fontSize=20, textColor=THEME["accent"], alignment=1)
    h1 = ParagraphStyle("h1", parent=styles["Heading1"], fontSize=14, textColor=THEME["accent"])
    body = ParagraphStyle("body", parent=styles["BodyText"], fontSize=10)

    elems = []
    elems.append(Paragraph("Full Geotechnical Investigation Report — GeoMate V2", title))
    elems.append(Spacer(1,6))
    elems.append(Paragraph(f"Date: {datetime.today().strftime('%Y-%m-%d')}", body))
    elems.append(Spacer(1,10))

    # For each site: include summary, field data, lab results, GSD, map link (if snapshot exists)
    for s in sites_list:
        elems.append(Paragraph(f"Site: {s.get('Site Name','Unnamed')}", h1))
        elems.append(Paragraph(f"Location: {s.get('Coordinates','Not provided')}", body))
        elems.append(Spacer(1,6))
        elems.append(Paragraph("1. Site Description & Geology", body))
        elems.append(Paragraph(s.get("site_description","Not provided"), body))
        elems.append(Spacer(1,6))

        elems.append(Paragraph("2. Field Investigation & Laboratory Testing", body))
        # show available fields
        lines = [
            f"Load Bearing Capacity: {s.get('Load Bearing Capacity','Not provided')}",
            f"Skin Shear Strength: {s.get('Skin Shear Strength','Not provided')}",
            f"Relative Compaction: {s.get('Relative Compaction','Not provided')}",
            f"Rate of Consolidation: {s.get('Rate of Consolidation','Not provided')}",
            f"Nature of Construction: {s.get('Nature of Construction','Not provided')}"
        ]
        for L in lines:
            elems.append(Paragraph(L, body))
        elems.append(Spacer(1,8))

        # GSD & plot (section only rendered when GSD data was computed/saved)
        gsd = s.get("GSD")
        if gsd:
            elems.append(Paragraph("3. Grain Size Distribution", body))
            elems.append(Paragraph(f"D10: {gsd.get('D10')}, D30: {gsd.get('D30')}, D60: {gsd.get('D60')}, Cu: {gsd.get('Cu')}, Cc: {gsd.get('Cc')}", body))
            # Include saved image path if exists
            gsd_img = "/tmp/geomate_gsd_plot.png"
            if os.path.exists(gsd_img):
                elems.append(Spacer(1,6))
                elems.append(RLImage(gsd_img, width=150*mm, height=80*mm))
            elems.append(Spacer(1,8))

        # Earth Engine extracted data — json.dumps output truncated to 300
        # chars per field to keep the report compact.
        elems.append(Paragraph("4. Locator-derived Data", body))
        elems.append(Paragraph(f"Flood Data: {json.dumps(s.get('Flood Data','Not provided'))[:300]}", body))
        elems.append(Paragraph(f"Seismic Data: {json.dumps(s.get('Seismic Data','Not provided'))[:300]}", body))
        elems.append(Paragraph(f"Topography: {json.dumps(s.get('Topography','Not provided'))[:300]}", body))
        elems.append(Spacer(1,8))

        # Recommendations (simple placeholder text derived from site properties)
        elems.append(Paragraph("5. Recommendations (preliminary)", body))
        # Basic logic to create recommendations: USCS symbols starting with
        # "C" (CL/CH) are treated as clayey; everything else as granular.
        if s.get("USCS") and s.get("USCS").startswith("C"):
            elems.append(Paragraph(" - Soils have clayey character; consider consolidation and settlement checks. Use stiffened raft or piles for heavy loads.", body))
        else:
            elems.append(Paragraph(" - Soils are likely granular; shallow foundations possible with suitable compaction and drainage.", body))

        # One site per page.
        elems.append(PageBreak())

    # External references
    if external_refs:
        elems.append(Paragraph("References", h1))
        for r in external_refs:
            elems.append(Paragraph(r, body))

    doc.build(elems)
    pdf = buf.getvalue()
    buf.close()
    return pdf
|
| 1255 |
+
|
| 1256 |
+
# --------------------------
# REPORTS UI
# --------------------------
def reports_ui():
    """Render the Reports page: a classification-only PDF for one site and a
    full geotechnical PDF covering any subset of sites plus optional references.

    Reads ``ss["sites"]`` (list of site dicts) and ``ss["active_site_idx"]``;
    delegates PDF assembly to ``build_classification_pdf_bytes`` /
    ``build_full_geotech_pdf_bytes``.
    """
    st.header("📑 Reports — Classification-only & Full Geotechnical Report")

    sites = ss.get("sites", [])
    # Guard: with no sites the selectbox would get empty options and the
    # downstream `site_names.index(...)` lookup would raise.
    if not sites:
        st.info("No sites defined yet — add a site from the sidebar first.")
        return

    site_names = [s.get("Site Name", "Unnamed") for s in sites]
    # Clamp a possibly-stale active_site_idx (e.g. after a site was removed)
    # so it can never exceed the selectbox's option count.
    default_idx = min(ss.get("active_site_idx", 0), len(site_names) - 1)

    # Classification-only
    st.subheader("Classification-only report")
    # Select by index (with a label formatter) rather than by name so that
    # duplicate site names cannot resolve to the wrong site.
    sel_idx = st.selectbox(
        "Select site",
        options=list(range(len(site_names))),
        index=default_idx,
        format_func=lambda i: site_names[i],
    )
    if st.button("Generate Classification PDF"):
        site = sites[sel_idx]
        pdf_bytes = build_classification_pdf_bytes(site)
        st.download_button(
            "Download Classification PDF",
            data=pdf_bytes,
            file_name=f"classification_{site_names[sel_idx]}.pdf",
            mime="application/pdf",
        )

    st.markdown("---")

    # Full report
    st.subheader("Full Geotechnical Report")
    selected = st.multiselect("Sites to include", site_names, default=site_names)
    ext_refs_text = st.text_area("External references (one per line)")
    if st.button("Generate Full Report PDF"):
        if not selected:
            st.error("Select at least one site.")
        else:
            chosen_sites = [sites[site_names.index(n)] for n in selected]
            # One reference per non-blank line of the text area.
            ext_refs = [line.strip() for line in ext_refs_text.splitlines() if line.strip()]
            with st.spinner("Building PDF (this may take a few seconds)..."):
                pdf_bytes = build_full_geotech_pdf_bytes(chosen_sites, ext_refs)
            st.download_button(
                "Download Full Geotechnical Report",
                data=pdf_bytes,
                file_name="geomate_full_report.pdf",
                mime="application/pdf",
            )
+
# --------------------------
# Final UI main function (glue)
# --------------------------
def ui_main_final():
    """Top-level page glue: draw the sidebar (model picker + project-site
    management) and route to the page selected in ``ss["page"]``.
    """
    # Sidebar (model selection + project sites)
    with st.sidebar:
        st.markdown(
            f"<h3 style='color:{THEME['accent']};margin:6px 0;'>GeoMate V2</h3>",
            unsafe_allow_html=True,
        )
        model = st.selectbox(
            "Select LLM model",
            ["meta-llama/llama-4-maverick-17b-128e-instruct", "llama3-8b-8192", "gemma-7b-it"],
            index=0,
        )
        ss["selected_model"] = model

        st.markdown("### Project Sites")
        # Add / remove / select sites
        cols = st.columns([3, 1])
        new_site_name = cols[0].text_input("New site name", key="sidebar_new_site_name")
        if cols[1].button("➕ Add"):
            # Hoist the stripped name instead of re-stripping three times.
            name = new_site_name.strip()
            if name:
                add_site(name)
                st.success(f"Added site {name}")
            else:
                # No name typed: fall back to an auto-numbered site name.
                add_site(f"Site-{len(ss['sites'])+1}")
                st.success("Added new site")

        st.markdown("Active site:")
        # Guard: an empty site list would give st.radio no options, and a
        # stale active_site_idx (e.g. after removing a site) would be out of
        # range — clamp it before using it as the default index.
        if ss.get("sites"):
            safe_idx = min(ss.get("active_site_idx", 0), len(ss["sites"]) - 1)
            idx = st.radio(
                "Select active site",
                options=list(range(len(ss["sites"]))),
                format_func=lambda i: ss["sites"][i].get("Site Name", "Site"),
                index=safe_idx,
            )
            ss["active_site_idx"] = idx

            with st.expander("Show active site JSON"):
                st.json(ss["sites"][ss["active_site_idx"]])
        else:
            st.caption("No sites yet — add one above.")

    # Page routing (ss['page'] set by earlier parts' option_menu).
    # Dispatch table instead of a long if/elif chain.
    routes = {
        "Landing": landing_ui,
        "Soil Recognizer": soil_recognizer_ui,
        "Soil Classifier": soil_classifier_ui,  # classifier UI from Part 2
        "GSD Curve": gsd_curve_ui,
        "Locator": locator_ui,
        "GeoMate Ask": rag_ui,
        "Reports": reports_ui,
    }
    page = ss.get("page", "Landing")
    handler = routes.get(page)
    if handler is not None:
        handler()
    else:
        st.info("Select a page from the sidebar.")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1336 |
|
| 1337 |
+
# Run
|
| 1338 |
if __name__ == "__main__":
|
| 1339 |
+
ui_main_final()
|
| 1340 |
+
|
| 1341 |
+
# End of Part 4/4
|