""" GraphWiz Ireland - Advanced GraphRAG Chat Application Complete rewrite with hybrid search, GraphRAG, Groq LLM, and instant responses """ import streamlit as st import os import sys import time import subprocess from pathlib import Path # Run version checker on first load (only in production) if os.path.exists('/app'): version_check_file = Path('/app/check_versions.py') if version_check_file.exists() and not os.environ.get('VERSION_CHECK_DONE'): try: result = subprocess.run( [sys.executable, str(version_check_file)], capture_output=True, text=True, timeout=30 ) print("=== VERSION CHECK OUTPUT ===") print(result.stdout) if result.stderr: print("=== VERSION CHECK ERRORS ===") print(result.stderr) os.environ['VERSION_CHECK_DONE'] = '1' except Exception as e: print(f"Version check failed: {e}") # Now import application modules from rag_engine import IrelandRAGEngine from dataset_loader import ensure_dataset_files import json # Load environment variables from .env file env_file = Path(__file__).parent.parent / '.env' if env_file.exists(): with open(env_file) as f: for line in f: line = line.strip() if line and not line.startswith('#') and '=' in line: key, value = line.split('=', 1) os.environ[key.strip()] = value.strip() # Page configuration st.set_page_config( page_title="GraphWiz Ireland - Intelligent Q&A", page_icon="đŽđĒ", layout="wide", initial_sidebar_state="expanded" ) # Custom CSS for better UI st.markdown(""" """, unsafe_allow_html=True) # Initialize RAG Engine (cached) @st.cache_resource def load_rag_engine(): """Load and cache RAG engine""" try: groq_api_key = os.getenv("GROQ_API_KEY") if not groq_api_key: st.error("â ī¸ GROQ_API_KEY not found in environment variables. Please set it to use the application.") st.info("Get your free API key at: https://console.groq.com/") st.stop() # Ensure dataset files are downloaded from HF Datasets if needed # Create a container for download progress that will be cleared after completion download_container = st.container() success, files_downloaded = ensure_dataset_files(progress_container=download_container) if not success: st.error("â ī¸ Failed to load dataset files from Hugging Face Datasets.") st.info("Please check your internet connection and try again.") st.stop() engine = IrelandRAGEngine( chunks_file="dataset/wikipedia_ireland/chunks.json", graphrag_index_file="dataset/wikipedia_ireland/graphrag_index.json", groq_api_key=groq_api_key, groq_model="llama-3.3-70b-versatile", use_cache=True ) return engine except FileNotFoundError as e: st.error(f"â ī¸ Data files not found: {e}") st.info("Dataset files should be automatically downloaded from Hugging Face Datasets.\n" "If the issue persists, please check your internet connection.") st.stop() except Exception as e: st.error(f"â ī¸ Error loading RAG engine: {e}") st.stop() # Main header st.markdown('
# Page configuration
st.set_page_config(
    page_title="GraphWiz Ireland - Intelligent Q&A",
    page_icon="🇮🇪",
    layout="wide",
    initial_sidebar_state="expanded"
)

# Custom CSS for better UI
# NOTE (assumption): no styles are defined here; the CSS class names referenced
# below (main-header, answer-box, footer) are placeholders.
st.markdown("""
""", unsafe_allow_html=True)

# Initialize RAG Engine (cached)
@st.cache_resource
def load_rag_engine():
    """Load and cache RAG engine"""
    try:
        groq_api_key = os.getenv("GROQ_API_KEY")
        if not groq_api_key:
            st.error("⚠️ GROQ_API_KEY not found in environment variables. Please set it to use the application.")
            st.info("Get your free API key at: https://console.groq.com/")
            st.stop()

        # Ensure dataset files are downloaded from HF Datasets if needed.
        # Create a container for download progress that will be cleared after completion.
        download_container = st.container()
        success, files_downloaded = ensure_dataset_files(progress_container=download_container)
        if not success:
            st.error("⚠️ Failed to load dataset files from Hugging Face Datasets.")
            st.info("Please check your internet connection and try again.")
            st.stop()

        engine = IrelandRAGEngine(
            chunks_file="dataset/wikipedia_ireland/chunks.json",
            graphrag_index_file="dataset/wikipedia_ireland/graphrag_index.json",
            groq_api_key=groq_api_key,
            groq_model="llama-3.3-70b-versatile",
            use_cache=True
        )
        return engine

    except FileNotFoundError as e:
        st.error(f"⚠️ Data files not found: {e}")
        st.info("Dataset files should be automatically downloaded from Hugging Face Datasets.\n"
                "If the issue persists, please check your internet connection.")
        st.stop()
    except Exception as e:
        st.error(f"⚠️ Error loading RAG engine: {e}")
        st.stop()

# Main header
st.markdown("""
<div class="main-header">
  <h1>🇮🇪 GraphWiz Ireland</h1>
  <p>Intelligent Q&A System powered by GraphRAG, Hybrid Search, and Groq LLM</p>
</div>
""", unsafe_allow_html=True)

# Load RAG engine
with st.spinner("🚀 Loading GraphWiz Engine..."):
    engine = load_rag_engine()

# Sidebar
with st.sidebar:
    st.markdown("### ⚙️ Settings")

    # Retrieval settings
    st.markdown("#### Retrieval Configuration")
    top_k = st.slider("Number of sources to retrieve", 3, 15, 5,
                      help="More sources = more context but slower")
    semantic_weight = st.slider("Semantic search weight", 0.0, 1.0, 0.7, 0.1,
                                help="Higher = prioritize meaning over keywords")
    keyword_weight = 1.0 - semantic_weight

    # Advanced options
    with st.expander("Advanced Options"):
        use_community = st.checkbox("Use community context", value=True,
                                    help="Include related topic clusters")
        show_debug = st.checkbox("Show debug information", value=False,
                                 help="Display retrieval details")

    st.markdown("---")

    # Statistics
    st.markdown("#### 📊 System Statistics")
    stats = engine.get_stats()

    col1, col2 = st.columns(2)
    with col1:
        st.metric("Knowledge Chunks", f"{stats['total_chunks']:,}")
    with col2:
        st.metric("Topic Communities", stats['total_communities'])

    cache_stats = stats['cache_stats']
    st.metric("Cache Hit Rate", cache_stats['hit_rate'])
    st.caption(f"Hits: {cache_stats['cache_hits']} | Misses: {cache_stats['cache_misses']}")

    if st.button("🗑️ Clear Cache"):
        engine.clear_cache()
        st.success("Cache cleared!")
        st.rerun()

    st.markdown("---")

    # Info
    st.markdown("#### ℹ️ About")
    st.info("""
**GraphWiz Ireland** uses:
- 🔍 Hybrid search (semantic + keyword)
- 🕸️ GraphRAG with community detection
- ⚡ Groq LLM (ultra-fast inference)
- 💾 Smart caching for instant responses
- 📚 Comprehensive Wikipedia data
""")

    st.markdown("---")
    st.caption("Built with Streamlit, FAISS, NetworkX, Groq, and spaCy")

# Suggested questions
st.markdown("### 💡 Try These Questions")

suggested_questions = [
    "What is the capital of Ireland?",
    "When did Ireland join the European Union?",
    "Who is the current president of Ireland?",
    "What is the oldest university in Ireland?",
    "Tell me about the history of Dublin",
    "What are the major cities in Ireland?",
    "Explain the Irish language and its history",
    "What is Ireland's economy based on?",
    "Describe Irish mythology and folklore",
    "What are the main political parties in Ireland?"
]

# Display suggested questions as buttons in columns
cols = st.columns(3)
for idx, question in enumerate(suggested_questions):
    with cols[idx % 3]:
        if st.button(question, key=f"suggested_{idx}", use_container_width=True):
            st.session_state.question = question

# Question input
st.markdown("### ❓ Ask Your Question")
question = st.text_input(
    "Enter your question about Ireland:",
    value=st.session_state.get('question', ''),
    placeholder="e.g., What is the history of Irish independence?",
    key="question_input"
)

# Search button and results
if st.button("🔍 Search", type="primary") or question:
    if question and question.strip():
        # Display searching indicator
        with st.spinner("🔍 Searching knowledge base..."):
            # Query the RAG engine
            result = engine.answer_question(
                question=question,
                top_k=top_k,
                semantic_weight=semantic_weight,
                keyword_weight=keyword_weight,
                use_community_context=use_community,
                return_debug_info=show_debug
            )

        # Display results
        st.markdown("---")

        # Response time and cache status
        col1, col2, col3 = st.columns([2, 1, 1])
        with col1:
            cache_indicator = "💾 Cached" if result['cached'] else "🔄 Fresh"
            st.caption(f"{cache_indicator} | Response time: {result['response_time']:.2f}s")
        with col2:
            st.caption(f"Retrieval: {result['retrieval_time']:.2f}s")
        with col3:
            st.caption(f"Generation: {result['generation_time']:.2f}s")

        # Answer
        st.markdown("### 💬 Answer")
        st.markdown(f'<div class="answer-box">{result["answer"]}</div>',
                    unsafe_allow_html=True)
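        # Keys consumed from the answer_question() result above: the timing and cache
        # keys are used directly in this block; 'answer' (the generated text) is an
        # assumption about the engine's return schema:
        #   answer, cached, response_time, retrieval_time, generation_time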
""", unsafe_allow_html=True)