al1kss committed on
Commit
04934a2
·
verified ·
1 Parent(s): 4637be1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +187 -8
app.py CHANGED
@@ -720,13 +720,14 @@ async def chat_general(
720
 
721
 
722
  # Custom AI Chat
 
723
  @app.post("/chat/custom/{ai_id}", response_model=QuestionResponse)
724
  async def chat_custom_ai(
725
  ai_id: str,
726
  request: QuestionRequest,
727
  current_user: dict = Depends(get_current_user)
728
  ):
729
- """Chat with custom AI"""
730
  if not lightrag_manager:
731
  raise HTTPException(status_code=503, detail="LightRAG system not initialized")
732
 
@@ -734,20 +735,198 @@ async def chat_custom_ai(
734
  # Generate conversation ID if not provided
735
  conversation_id = request.conversation_id or str(uuid.uuid4())
736
 
737
- # Query with persistent LightRAG
738
- response = await lightrag_manager.query_with_memory(
 
739
  ai_type="custom",
740
  question=request.question,
741
  conversation_id=conversation_id,
742
  user_id=current_user["id"],
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
743
  ai_id=ai_id,
744
- mode=request.mode or "hybrid"
 
 
 
745
  )
746
- await update_message_stats(request.question, response)
 
747
  return QuestionResponse(
748
- answer=response,
749
- mode=request.mode or "hybrid",
750
- status="success",
751
  conversation_id=conversation_id
752
  )
753
 
 
720
 
721
 
722
  # Custom AI Chat
723
+ # Custom AI Chat - FIXED VERSION with Fallback
724
  @app.post("/chat/custom/{ai_id}", response_model=QuestionResponse)
725
  async def chat_custom_ai(
726
  ai_id: str,
727
  request: QuestionRequest,
728
  current_user: dict = Depends(get_current_user)
729
  ):
730
+ """Chat with custom AI using fallback modes"""
731
  if not lightrag_manager:
732
  raise HTTPException(status_code=503, detail="LightRAG system not initialized")
733
 
 
735
  # Generate conversation ID if not provided
736
  conversation_id = request.conversation_id or str(uuid.uuid4())
737
 
738
+ # Query with fallback - SAME AS FIRE SAFETY
739
+ result = await query_rag_with_fallback(
740
+ lightrag_manager=lightrag_manager,
741
  ai_type="custom",
742
  question=request.question,
743
  conversation_id=conversation_id,
744
  user_id=current_user["id"],
745
+ ai_id=ai_id, # This is the key difference - pass ai_id
746
+ preferred_mode=request.mode or "hybrid"
747
+ )
748
+
749
+ await update_message_stats(request.question, result["answer"])
750
+ return QuestionResponse(
751
+ answer=result["answer"],
752
+ mode=result["mode"],
753
+ status=result["status"],
754
+ conversation_id=conversation_id
755
+ )
756
+
757
+ except Exception as e:
758
+ logger.error(f"Custom AI chat error: {e}")
759
+ raise HTTPException(status_code=500, detail=f"Chat error: {str(e)}")
760
+
761
+
762
+ # Alternative: Enhanced fallback system specifically for custom AIs
763
+ async def query_custom_ai_with_fallback(
764
+ lightrag_manager: PersistentLightRAGManager,
765
+ ai_id: str,
766
+ question: str,
767
+ conversation_id: str,
768
+ user_id: str,
769
+ preferred_mode: str = "hybrid"
770
+ ) -> Dict[str, Any]:
771
+ """Query custom AI with automatic fallback to working modes and Cloudflare backup"""
772
+
773
+ # Try modes in order of preference
774
+ fallback_modes = ["hybrid", "local", "global", "naive"]
775
+
776
+ # Start with user's preferred mode
777
+ if preferred_mode in fallback_modes:
778
+ fallback_modes.remove(preferred_mode)
779
+ fallback_modes.insert(0, preferred_mode)
780
+
781
+ last_error = None
782
+
783
+ for mode in fallback_modes:
784
+ try:
785
+ logger.info(f"🔍 Trying {mode} mode for custom AI {ai_id}")
786
+
787
+ response = await lightrag_manager.query_with_memory(
788
+ ai_type="custom",
789
+ question=question,
790
+ conversation_id=conversation_id,
791
+ user_id=user_id,
792
+ ai_id=ai_id,
793
+ mode=mode
794
+ )
795
+
796
+ # Check if response is valid (not a "Sorry" message)
797
+ if response and len(response.strip()) > 20 and not response.startswith("Sorry"):
798
+ logger.info(f"✅ {mode} mode worked for custom AI {ai_id}")
799
+ return {
800
+ "answer": response,
801
+ "mode": mode,
802
+ "status": "success",
803
+ "fallback_used": mode != preferred_mode
804
+ }
805
+ else:
806
+ logger.warning(f"⚠️ {mode} mode returned empty/error response for custom AI {ai_id}")
807
+ last_error = f"{mode} mode returned: {response[:100]}..."
808
+
809
+ except Exception as e:
810
+ logger.warning(f"⚠️ {mode} mode failed for custom AI {ai_id}: {e}")
811
+ last_error = str(e)
812
+ continue
813
+
814
+ # If all LightRAG modes fail, fallback to Cloudflare AI
815
+ try:
816
+ logger.info(f"🔄 All LightRAG modes failed for custom AI {ai_id}, falling back to Cloudflare AI")
817
+
818
+ # Get custom AI details for context
819
+ async with lightrag_manager.db.pool.acquire() as conn:
820
+ ai_details = await conn.fetchrow("""
821
+ SELECT name, description FROM rag_instances
822
+ WHERE ai_id = $1 AND ai_type = 'custom' AND status = 'active'
823
+ """, ai_id)
824
+
825
+ # Create context-aware system prompt
826
+ ai_name = ai_details['name'] if ai_details else "Custom AI"
827
+ ai_description = ai_details['description'] if ai_details else "a custom AI assistant"
828
+
829
+ system_prompt = f"""You are {ai_name}, {ai_description}.
830
+
831
+ Although you don't have access to your specific knowledge base right now,
832
+ provide the best general assistance you can. Be helpful, accurate, and
833
+ acknowledge that you're operating in a general mode without your specialized knowledge."""
834
+
835
+ fallback_response = await lightrag_manager.cloudflare_worker.query(
836
+ question,
837
+ system_prompt
838
+ )
839
+
840
+ # Save fallback response to database
841
+ await lightrag_manager.db.save_conversation_message(
842
+ conversation_id, "user", question, {
843
+ "user_id": user_id,
844
+ "ai_type": "custom",
845
+ "ai_id": ai_id
846
+ }
847
+ )
848
+ await lightrag_manager.db.save_conversation_message(
849
+ conversation_id, "assistant", fallback_response, {
850
+ "mode": "cloudflare_fallback",
851
+ "ai_type": "custom",
852
+ "ai_id": ai_id,
853
+ "user_id": user_id,
854
+ "error": last_error
855
+ }
856
+ )
857
+
858
+ return {
859
+ "answer": fallback_response,
860
+ "mode": "cloudflare_fallback",
861
+ "status": "success",
862
+ "fallback_used": True
863
+ }
864
+
865
+ except Exception as fallback_error:
866
+ logger.error(f"❌ Even Cloudflare fallback failed for custom AI {ai_id}: {fallback_error}")
867
+
868
+ # Last resort - return informative error
869
+ error_response = f"I'm having trouble accessing my knowledge base right now. Please try again in a moment, or contact support if the issue persists. (Error: {last_error})"
870
+
871
+ return {
872
+ "answer": error_response,
873
+ "mode": "error",
874
+ "status": "error",
875
+ "fallback_used": True
876
+ }
877
+
878
+
879
+ @app.post("/admin/rebuild-custom-ai/{ai_id}")
880
+ async def rebuild_custom_ai(
881
+ ai_id: str,
882
+ current_user: dict = Depends(get_current_user)
883
+ ):
884
+ """Force rebuild a corrupted custom AI"""
885
+
886
+ if not lightrag_manager:
887
+ raise HTTPException(status_code=503, detail="LightRAG system not initialized")
888
+
889
+ try:
890
+ success = await lightrag_manager.force_rebuild_custom_ai(ai_id, current_user["id"])
891
+
892
+ if success:
893
+ return {"message": f"Successfully rebuilt custom AI {ai_id}"}
894
+ else:
895
+ raise HTTPException(status_code=500, detail="Failed to rebuild custom AI")
896
+
897
+ except Exception as e:
898
+ logger.error(f"Rebuild custom AI error: {e}")
899
+ raise HTTPException(status_code=500, detail=f"Rebuild error: {str(e)}")
900
+
901
+ @app.post("/chat/custom/{ai_id}", response_model=QuestionResponse)
902
+ async def chat_custom_ai_enhanced(
903
+ ai_id: str,
904
+ request: QuestionRequest,
905
+ current_user: dict = Depends(get_current_user)
906
+ ):
907
+ """Chat with custom AI using enhanced fallback system"""
908
+ if not lightrag_manager:
909
+ raise HTTPException(status_code=503, detail="LightRAG system not initialized")
910
+
911
+ try:
912
+ # Generate conversation ID if not provided
913
+ conversation_id = request.conversation_id or str(uuid.uuid4())
914
+
915
+ # Query with enhanced custom AI fallback
916
+ result = await query_custom_ai_with_fallback(
917
+ lightrag_manager=lightrag_manager,
918
  ai_id=ai_id,
919
+ question=request.question,
920
+ conversation_id=conversation_id,
921
+ user_id=current_user["id"],
922
+ preferred_mode=request.mode or "hybrid"
923
  )
924
+
925
+ await update_message_stats(request.question, result["answer"])
926
  return QuestionResponse(
927
+ answer=result["answer"],
928
+ mode=result["mode"],
929
+ status=result["status"],
930
  conversation_id=conversation_id
931
  )
932