al1kss committed · Commit 5005b07 · verified · 1 Parent(s): 84a2b8a

Update lightrag_manager.py

Files changed (1):
  1. lightrag_manager.py +89 -351
lightrag_manager.py CHANGED
@@ -927,7 +927,7 @@ class PersistentLightRAGManager:
         except Exception as e:
             self.logger.error(f"Failed to save RAG to database: {e}")
             raise
-
     async def _load_from_database(self, config: RAGConfig) -> Optional[LightRAG]:
         """Load RAG from database using stored JSON data FIRST, then fallback to Blob URLs"""
 
@@ -938,421 +938,159 @@
                 self.logger.info(f"No RAG instance found in database for {config.get_cache_key()}")
                 return None
 
-            self.logger.info(f"Found RAG instance: {instance_data['name']} (ID: {instance_data['id']})")
 
-            # PRIORITY 1: Check if we have storage data in knowledge_files table (DATABASE FIRST)
             async with self.db.pool.acquire() as conn:
-                stored_data = await conn.fetchval("""
-                    SELECT content_text FROM knowledge_files
                     WHERE rag_instance_id = $1 AND filename = 'lightrag_storage.json'
                     AND processing_status = 'processed'
                     ORDER BY created_at DESC
                     LIMIT 1
                 """, instance_data['id'])
 
-            if stored_data:
-                self.logger.info("🎯 Loading RAG from DATABASE storage (priority method)")
 
-                # Create new RAG instance
-                rag = await self._create_new_rag_instance(config)
-
-                # Restore storage data from database
                 try:
-                    storage_data = json.loads(stored_data)
                     self.logger.info(f"📊 Parsed storage data with keys: {list(storage_data.keys())}")
 
-                    # Write storage files back to temporary directory
                     storage_restored = False
                     for filename, data in storage_data.items():
-                        if data and (isinstance(data, (list, dict))):
-                            file_path = f"{rag.working_dir}/{filename}.json"
                             with open(file_path, 'w') as f:
                                 json.dump(data, f)
 
                             file_size = os.path.getsize(file_path)
-                            item_count = len(data) if isinstance(data, (list, dict)) else 'unknown'
-                            self.logger.info(f"✅ Restored {filename}.json: {file_size} bytes, {item_count} items")
                             storage_restored = True
 
                     if storage_restored:
-                        # Force reload storage from files
                         await rag.initialize_storages()
 
-                        # Verify the restored RAG has working data
-                        verification_passed = await self._verify_rag_storage(rag)
                         if verification_passed:
-                            self.logger.info("🎉 Successfully loaded and verified RAG from DATABASE storage")
                             return rag
                         else:
-                            self.logger.warning("⚠️ RAG loaded from database but verification failed")
                             return None
                     else:
-                        self.logger.warning("⚠️ No valid storage data found in database JSON")
                         return None
 
                 except Exception as e:
-                    self.logger.error(f"❌ Failed to restore storage data from database: {e}")
-                    # Don't return None here, try Blob storage as fallback
-
-            # PRIORITY 2: Fallback to Vercel Blob storage (only if database fails)
-            if (instance_data.get('graph_blob_url') and
-                instance_data.get('vector_blob_url') and
-                instance_data.get('config_blob_url') and
-                not instance_data.get('graph_blob_url').startswith('database://')):
-
-                self.logger.info("🔄 Falling back to Vercel Blob storage")
-                return await self._load_from_blob_storage(instance_data)
-
-            self.logger.warning(f"❌ No usable storage data found for RAG instance {instance_data['id']}")
-            return None
 
         except Exception as e:
-            self.logger.error(f"Failed to load RAG from database: {e}")
             return None
-
-    async def _verify_rag_storage(self, rag: LightRAG) -> bool:
-        """Verify that RAG storage has been properly loaded with actual data"""
         try:
             # Check vector storage
-            vector_count = 0
             if hasattr(rag.vector_storage, '_data') and rag.vector_storage._data:
-                vector_count = len(rag.vector_storage._data)
 
-            # Check chunks storage
-            chunks_count = 0
             if hasattr(rag, 'chunks') and rag.chunks:
                 try:
                     chunks_data = await rag.chunks.get_all()
-                    chunks_count = len(chunks_data) if chunks_data else 0
                 except:
                     pass
 
-            # Check entities storage
-            entities_count = 0
             if hasattr(rag, 'entities') and rag.entities:
                 try:
                     entities_data = await rag.entities.get_all()
-                    entities_count = len(entities_data) if entities_data else 0
                 except:
                     pass
 
-            # Check relationships storage
-            relationships_count = 0
             if hasattr(rag, 'relationships') and rag.relationships:
                 try:
                     relationships_data = await rag.relationships.get_all()
-                    relationships_count = len(relationships_data) if relationships_data else 0
                 except:
                     pass
 
-            self.logger.info(f"📋 RAG storage verification: vectors={vector_count}, chunks={chunks_count}, entities={entities_count}, relationships={relationships_count}")
 
-            # Consider RAG loaded if ANY storage has data
-            has_data = vector_count > 0 or chunks_count > 0 or entities_count > 0 or relationships_count > 0
 
             if has_data:
-                self.logger.info("✅ RAG verification PASSED - has working data")
             else:
-                self.logger.warning("❌ RAG verification FAILED - no data found")
 
             return has_data
 
         except Exception as e:
-            self.logger.error(f"Failed to verify RAG storage: {e}")
             return False
-
-    async def _load_from_blob_storage(self, instance_data: Dict[str, Any]) -> Optional[LightRAG]:
-        """Load RAG from Vercel Blob storage"""
-        try:
-            self.logger.info("Downloading RAG state from Vercel Blob...")
-
-            graph_data = await self.blob_client.get(instance_data['graph_blob_url'])
-            vector_data = await self.blob_client.get(instance_data['vector_blob_url'])
-            config_data = await self.blob_client.get(instance_data['config_blob_url'])
-
-            rag_state = {
-                'graph': pickle.loads(gzip.decompress(graph_data)),
-                'vectors': pickle.loads(gzip.decompress(vector_data)),
-                'config': pickle.loads(gzip.decompress(config_data))
-            }
-
-            self.logger.info("Successfully downloaded and deserialized RAG state")
-
-            return await self._deserialize_rag_state(rag_state)
-
-        except Exception as e:
-            self.logger.error(f"Failed to load RAG from Vercel Blob: {e}")
-            return None
-
-    async def _serialize_rag_state(self, rag: LightRAG) -> Dict[str, Any]:
-        """Serialize RAG state for LightRAG 1.3.7"""
-        try:
-            graph_data = {"nodes": [], "edges": [], "graph_attrs": {}}
-            if hasattr(rag.graph_storage, 'graph') and rag.graph_storage.graph:
-                graph_data = {
-                    "nodes": list(rag.graph_storage.graph.nodes(data=True)),
-                    "edges": list(rag.graph_storage.graph.edges(data=True)),
-                    "graph_attrs": dict(rag.graph_storage.graph.graph)
-                }
-
-            vector_data = {"items": [], "metadata": [], "dimension": 1024}
-
-            if hasattr(rag.vector_storage, '_data') and rag.vector_storage._data:
-                vector_data["items"] = len(rag.vector_storage._data)
-                self.logger.info(f"Serializing {len(rag.vector_storage._data)} vector storage items")
-            elif hasattr(rag.vector_storage, 'embeddings') and rag.vector_storage.embeddings is not None:
-                vector_data["embeddings"] = rag.vector_storage.embeddings.tolist()
-                self.logger.info(f"Serializing {len(vector_data['embeddings'])} embeddings")
-
-            chunks_data = {"count": 0, "data": []}
-            if hasattr(rag, 'chunks') and rag.chunks:
-                try:
-                    all_chunks = await rag.chunks.get_all()
-                    if all_chunks:
-                        chunks_data["count"] = len(all_chunks)
-                        chunks_data["data"] = all_chunks[:100]
-                        self.logger.info(f"Serializing {chunks_data['count']} chunks")
-                except Exception as e:
-                    self.logger.warning(f"Failed to get chunks data: {e}")
-
-            entities_data = {"count": 0}
-            if hasattr(rag, 'entities') and rag.entities:
-                try:
-                    all_entities = await rag.entities.get_all()
-                    if all_entities:
-                        entities_data["count"] = len(all_entities)
-                        self.logger.info(f"Serializing {entities_data['count']} entities")
-                except Exception as e:
-                    self.logger.warning(f"Failed to get entities data: {e}")
-
-            config_data = {
-                "max_parallel_insert": rag.max_parallel_insert,
-                "llm_model_name": getattr(rag, 'llm_model_name', 'unknown'),
-                "embedding_dimension": getattr(rag.embedding_func, 'embedding_dim', 1024),
-                "created_at": datetime.now().isoformat(),
-                "working_dir": rag.working_dir,
-                "lightrag_version": "1.3.7",
-                "storage_stats": {
-                    "vector_items": vector_data.get("items", 0),
-                    "chunks_count": chunks_data["count"],
-                    "entities_count": entities_data["count"]
-                }
-            }
-
-            return {
-                "graph": graph_data,
-                "vectors": vector_data,
-                "chunks": chunks_data,
-                "entities": entities_data,
-                "config": config_data,
-                "version": "1.3.7"
-            }
-
-        except Exception as e:
-            self.logger.error(f"Failed to serialize RAG state: {e}")
-            raise
-
-    async def _deserialize_rag_state(self, rag_state: Dict[str, Any]) -> LightRAG:
-        """Deserialize RAG state and reconstruct LightRAG"""
-        try:
-            config = rag_state["config"]
-
-            working_dir = f"/tmp/rag_restored_{uuid.uuid4()}"
-            os.makedirs(working_dir, exist_ok=True)
-
-            rag = LightRAG(
-                working_dir=working_dir,
-                max_parallel_insert=config.get("max_parallel_insert", 2),
-                llm_model_func=self.cloudflare_worker.query,
-                llm_model_name=config.get("llm_model_name", self.cloudflare_worker.llm_model_name),
-                llm_model_max_token_size=4080,
-                embedding_func=EmbeddingFunc(
-                    embedding_dim=config.get("embedding_dimension", 1024),
-                    max_token_size=2048,
-                    func=self.cloudflare_worker.embedding_chunk,
-                ),
-                graph_storage="NetworkXStorage",
-                vector_storage="NanoVectorDBStorage",
-            )
-
-            await rag.initialize_storages()
-
-            graph_data = rag_state["graph"]
-            if graph_data["nodes"] and hasattr(rag.graph_storage, 'graph'):
-                rag.graph_storage.graph.add_nodes_from(graph_data["nodes"])
-            if graph_data["edges"] and hasattr(rag.graph_storage, 'graph'):
-                rag.graph_storage.graph.add_edges_from(graph_data["edges"])
-
-            vector_data = rag_state["vectors"]
-            if vector_data["embeddings"] and hasattr(rag.vector_storage, 'embeddings'):
-                rag.vector_storage.embeddings = np.array(vector_data["embeddings"])
-                if hasattr(rag.vector_storage, 'metadata'):
-                    rag.vector_storage.metadata = vector_data["metadata"]
-
-            return rag
-
-        except Exception as e:
-            self.logger.error(f"Failed to deserialize RAG state: {e}")
-            raise
-
-    async def query_with_memory(
-        self,
-        ai_type: str,
-        question: str,
-        conversation_id: str,
-        user_id: Optional[str] = None,
-        ai_id: Optional[str] = None,
-        mode: str = "hybrid",
-        max_memory_turns: int = 10
-    ) -> str:
-        """Query RAG with conversation memory"""
-
-        try:
-            rag = await self.get_or_create_rag_instance(ai_type, user_id, ai_id)
-
-            messages = await self.db.get_conversation_messages(conversation_id)
-
-            context_prompt = self._build_context_prompt(question, messages[-max_memory_turns*2:])
-
-            response = await rag.aquery(context_prompt, QueryParam(mode=mode))
-
-            await self.db.save_conversation_message(
-                conversation_id, "user", question, {
-                    "user_id": user_id,
-                    "ai_type": ai_type,
-                    "ai_id": ai_id
-                }
-            )
-            await self.db.save_conversation_message(
-                conversation_id, "assistant", response, {
-                    "mode": mode,
-                    "ai_type": ai_type,
-                    "user_id": user_id,
-                    "ai_id": ai_id
-                }
-            )
-
-            return response
-
-        except Exception as e:
-            self.logger.error(f"Query with memory failed: {e}")
-            return "I apologize, but I'm experiencing technical difficulties. Please try again later."
-
-    def _build_context_prompt(self, question: str, messages: List[Dict[str, Any]]) -> str:
-        """Build context prompt with conversation memory"""
-        if not messages:
-            return question
-
-        context = "Previous conversation:\n"
-        for msg in messages:
-            role = msg['role']
-            content = msg['content'][:200] + "..." if len(msg['content']) > 200 else msg['content']
-            context += f"{role.title()}: {content}\n"
-
-        context += f"\nCurrent question: {question}"
-        return context
-
-    async def create_custom_ai(
-        self,
-        user_id: str,
-        ai_name: str,
-        description: str,
-        uploaded_files: List[Dict[str, Any]]
-    ) -> str:
-        """Create custom AI with uploaded files"""
-
-        ai_id = str(uuid.uuid4())
-
-        try:
-            rag = await self.get_or_create_rag_instance(
-                "custom", user_id, ai_id, ai_name, description
-            )
-
-            combined_content = ""
-            for file_data in uploaded_files:
-                content = file_data.get('content', '')
-                combined_content += f"\n\n=== {file_data['filename']} ===\n\n{content}\n\n"
-
-            if combined_content.strip():
-                await rag.ainsert(combined_content)
-
-            config = RAGConfig(
-                ai_type="custom",
-                user_id=user_id,
-                ai_id=ai_id,
-                name=ai_name,
-                description=description
-            )
-
-            await self._save_to_database(config, rag)
-
-            return ai_id
-
-        except Exception as e:
-            self.logger.error(f"Failed to create custom AI: {e}")
-            raise
-
-    def _estimate_tokens(self, rag_state: Dict[str, Any]) -> int:
-        """Estimate token count from RAG state"""
-        try:
-            content_size = len(json.dumps(rag_state))
-            return content_size // 4
-        except:
-            return 0
-
-    def get_conversation_memory_status(self, conversation_id: str) -> Dict[str, Any]:
-        """Get conversation memory status"""
-        if conversation_id in self.conversation_memory:
-            return {
-                "has_memory": True,
-                "message_count": len(self.conversation_memory[conversation_id]),
-                "last_updated": datetime.now().isoformat()
-            }
-        return {"has_memory": False, "message_count": 0}
-
-    def clear_conversation_memory(self, conversation_id: str):
-        """Clear conversation memory"""
-        if conversation_id in self.conversation_memory:
-            del self.conversation_memory[conversation_id]
-
-    async def cleanup(self):
-        """Clean up resources"""
-        self.rag_instances.clear()
-        self.conversation_memory.clear()
-        self.processing_locks.clear()
-        await self.db.close()
-        self.logger.info("LightRAG manager cleaned up")
-
-    async def _verify_rag_storage(self, rag: LightRAG) -> bool:
-        """Verify that RAG storage has been properly loaded"""
-        try:
-            vector_count = 0
-            if hasattr(rag.vector_storage, '_data') and rag.vector_storage._data:
-                vector_count = len(rag.vector_storage._data)
-
-            chunks_count = 0
-            if hasattr(rag, 'chunks') and rag.chunks:
-                try:
-                    chunks_data = await rag.chunks.get_all()
-                    chunks_count = len(chunks_data) if chunks_data else 0
-                except:
-                    pass
-
-            entities_count = 0
-            if hasattr(rag, 'entities') and rag.entities:
-                try:
-                    entities_data = await rag.entities.get_all()
-                    entities_count = len(entities_data) if entities_data else 0
-                except:
-                    pass
-
-            self.logger.info(f"RAG storage verification: vectors={vector_count}, chunks={chunks_count}, entities={entities_count}")
-
-            return vector_count > 0 or chunks_count > 0 or entities_count > 0
-
-        except Exception as e:
-            self.logger.error(f"Failed to verify RAG storage: {e}")
-            return False
 
 # Global instance
 lightrag_manager: Optional[PersistentLightRAGManager] = None
 
@@ -927,7 +927,7 @@ class PersistentLightRAGManager:
         except Exception as e:
             self.logger.error(f"Failed to save RAG to database: {e}")
             raise
+
     async def _load_from_database(self, config: RAGConfig) -> Optional[LightRAG]:
         """Load RAG from database using stored JSON data FIRST, then fallback to Blob URLs"""
 
@@ -938,421 +938,159 @@
                 self.logger.info(f"No RAG instance found in database for {config.get_cache_key()}")
                 return None
 
+            self.logger.info(f"🔍 Found RAG instance: {instance_data['name']} (ID: {instance_data['id']})")
 
+            # Check if we have storage data in knowledge_files table
             async with self.db.pool.acquire() as conn:
+                storage_record = await conn.fetchrow("""
+                    SELECT content_text, file_size, token_count
+                    FROM knowledge_files
                     WHERE rag_instance_id = $1 AND filename = 'lightrag_storage.json'
                     AND processing_status = 'processed'
                     ORDER BY created_at DESC
                     LIMIT 1
                 """, instance_data['id'])
 
+            if storage_record and storage_record['content_text']:
+                self.logger.info(f"🎯 Found database storage: {storage_record['file_size']} bytes, {storage_record['token_count']} tokens")
 
                 try:
+                    # Parse the JSON storage data
+                    storage_data = json.loads(storage_record['content_text'])
                     self.logger.info(f"📊 Parsed storage data with keys: {list(storage_data.keys())}")
 
+                    # Check if we have actual data
+                    total_items = 0
+                    for key, data in storage_data.items():
+                        if isinstance(data, (list, dict)):
+                            item_count = len(data)
+                            total_items += item_count
+                            self.logger.info(f" - {key}: {item_count} items")
+
+                    if total_items == 0:
+                        self.logger.warning("❌ Storage data exists but contains no items")
+                        return None
+
+                    # Create new RAG instance structure
+                    working_dir = f"/tmp/rag_restored_{uuid.uuid4()}"
+                    os.makedirs(working_dir, exist_ok=True)
+
+                    rag = LightRAG(
+                        working_dir=working_dir,
+                        max_parallel_insert=2,
+                        llm_model_func=self.cloudflare_worker.query,
+                        llm_model_name=self.cloudflare_worker.llm_models[0],
+                        llm_model_max_token_size=4080,
+                        embedding_func=EmbeddingFunc(
+                            embedding_dim=1024,
+                            max_token_size=2048,
+                            func=self.cloudflare_worker.embedding_chunk,
+                        ),
+                        graph_storage="NetworkXStorage",
+                        vector_storage="NanoVectorDBStorage",
+                    )
+
+                    # Initialize empty storages
+                    await rag.initialize_storages()
+
+                    # Restore storage files from database JSON
                     storage_restored = False
                     for filename, data in storage_data.items():
+                        if data and isinstance(data, (list, dict)) and len(data) > 0:
+                            file_path = f"{working_dir}/{filename}.json"
                             with open(file_path, 'w') as f:
                                 json.dump(data, f)
 
                             file_size = os.path.getsize(file_path)
+                            self.logger.info(f"✅ Restored {filename}.json: {file_size} bytes")
                             storage_restored = True
 
                     if storage_restored:
+                        # Re-initialize storages to load the restored files
                         await rag.initialize_storages()
+                        self.logger.info("🔄 Re-initialized storages with restored data")
 
+                        # Verify the RAG has working data
+                        verification_passed = await self._verify_restored_rag(rag)
                         if verification_passed:
+                            self.logger.info("🎉 SUCCESS: Loaded and verified RAG from DATABASE storage")
                             return rag
                         else:
+                            self.logger.warning("⚠️ RAG restored but verification failed")
                             return None
                     else:
+                        self.logger.warning("⚠️ No valid storage files restored")
                        return None
 
+                except json.JSONDecodeError as e:
+                    self.logger.error(f"❌ Failed to parse JSON storage data: {e}")
+                    return None
                 except Exception as e:
+                    self.logger.error(f"❌ Failed to restore from database storage: {e}")
+                    return None
+            else:
+                self.logger.warning(f"⚠️ No storage data found in database for RAG {instance_data['id']}")
+                return None
 
         except Exception as e:
+            self.logger.error(f" Database loading failed: {e}")
             return None
+
+    async def _verify_restored_rag(self, rag: LightRAG) -> bool:
+        """Verify that restored RAG has actual working data"""
         try:
+            # Check different storage components
+            checks = {
+                'vector_storage': 0,
+                'chunks': 0,
+                'entities': 0,
+                'relationships': 0
+            }
+
             # Check vector storage
             if hasattr(rag.vector_storage, '_data') and rag.vector_storage._data:
+                checks['vector_storage'] = len(rag.vector_storage._data)
 
+            # Check chunks
             if hasattr(rag, 'chunks') and rag.chunks:
                 try:
                     chunks_data = await rag.chunks.get_all()
+                    checks['chunks'] = len(chunks_data) if chunks_data else 0
                 except:
                     pass
 
+            # Check entities
             if hasattr(rag, 'entities') and rag.entities:
                 try:
                     entities_data = await rag.entities.get_all()
+                    checks['entities'] = len(entities_data) if entities_data else 0
                 except:
                     pass
 
+            # Check relationships
             if hasattr(rag, 'relationships') and rag.relationships:
                 try:
                     relationships_data = await rag.relationships.get_all()
+                    checks['relationships'] = len(relationships_data) if relationships_data else 0
                 except:
                     pass
 
+            # Log verification results
+            self.logger.info(f"📋 RAG verification: {checks}")
 
+            # Consider successful if ANY component has data
+            has_data = any(count > 0 for count in checks.values())
 
             if has_data:
+                self.logger.info("✅ RAG verification PASSED")
            else:
+                self.logger.warning("❌ RAG verification FAILED - no data in any component")
 
             return has_data
 
         except Exception as e:
+            self.logger.error(f" RAG verification error: {e}")
             return False
 
 # Global instance
 lightrag_manager: Optional[PersistentLightRAGManager] = None
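
For readers following the change: the commit replaces the Blob-first load path with a database-first one. The new _load_from_database parses the lightrag_storage.json record from knowledge_files, writes each storage section back into a fresh working directory, re-initializes the LightRAG storages, and verifies the result with _verify_restored_rag. Below is a minimal standalone sketch of just that restore step; the helper name restore_storage_files is illustrative and not part of the commit, the directory layout mirrors the diff above, and constructing the LightRAG instance and running verification are left to the caller.

import json
import os
import uuid

def restore_storage_files(content_text: str) -> str:
    """Write each non-empty storage section from the stored JSON back to disk.

    Returns a working directory that a LightRAG instance can be pointed at
    before calling initialize_storages() again (sketch only; error handling
    is simplified compared to the commit).
    """
    # content_text would come from knowledge_files.content_text in the commit.
    storage_data = json.loads(content_text)
    working_dir = f"/tmp/rag_restored_{uuid.uuid4()}"
    os.makedirs(working_dir, exist_ok=True)

    restored = 0
    for filename, data in storage_data.items():
        # Mirror the commit's guard: only sections that actually hold items are written.
        if data and isinstance(data, (list, dict)) and len(data) > 0:
            with open(f"{working_dir}/{filename}.json", "w") as f:
                json.dump(data, f)
            restored += 1

    if restored == 0:
        raise ValueError("storage data exists but contains no items")
    return working_dir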