al1kss committed · Commit 4c2c868 · verified · 1 Parent(s): 12818d7

Update app.py

Files changed (1)
  1. app.py +465 -891
app.py CHANGED
@@ -1,770 +1,247 @@
 
1
  import os
2
  import json
3
- import asyncio
4
  import hashlib
5
  import shutil
6
- import zipfile
7
- import uuid
8
- import requests
9
- from datetime import datetime, timedelta
10
  from pathlib import Path
11
- from typing import List, Dict, Optional
12
 
13
- # FastAPI and dependencies
14
- from fastapi import FastAPI, HTTPException, Depends, File, UploadFile
15
  from fastapi.middleware.cors import CORSMiddleware
16
- from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
17
  from pydantic import BaseModel, EmailStr
18
- import aiohttp
19
- import jwt
20
 
21
- # Database imports
22
- import asyncpg
23
- import redis.asyncio as redis
24
- from contextlib import asynccontextmanager
 
 
 
25
 
26
- # Pydantic models
27
- class UserRegister(BaseModel):
28
- email: EmailStr
29
- name: str
30
 
31
- class UserLogin(BaseModel):
32
- email: EmailStr

  class QuestionRequest(BaseModel):
35
  question: str
36
- mode: str = "hybrid"
37
  conversation_id: Optional[str] = None
38
 
39
- class CustomAIRequest(BaseModel):
40
- name: str
41
- description: str
42
-
43
  class QuestionResponse(BaseModel):
44
  answer: str
45
  mode: str
46
  status: str
47
  conversation_id: Optional[str] = None
48
 
 
 
 
 
 
 
 
 
 
 
 
49
  class FileUploadResponse(BaseModel):
50
  filename: str
51
  size: int
52
  message: str
53
 
54
- # Database connection management
 
 
 
 
55
  class DatabaseManager:
56
- def __init__(self):
57
- self.pool = None
58
- self.redis = None
59
-
60
- async def connect(self):
61
- """Initialize database connections"""
62
- # PostgreSQL connection
63
- database_url = os.getenv('DATABASE_URL')
64
- if database_url:
65
- try:
66
- self.pool = await asyncpg.create_pool(database_url, max_size=20)
67
- print("✅ PostgreSQL connected successfully")
68
- except Exception as e:
69
- print(f"❌ PostgreSQL connection failed: {e}")
70
- self.pool = None
71
 
72
- # Redis connection
73
- redis_url = os.getenv('REDIS_URL')
74
- if redis_url:
75
- try:
76
- self.redis = redis.from_url(redis_url, decode_responses=True)
77
- await self.redis.ping()
78
- print("✅ Redis connected successfully")
79
- except Exception as e:
80
- print(f"❌ Redis connection failed: {e}")
81
- self.redis = None
82
-
83
- async def close(self):
84
- """Close database connections"""
85
- if self.pool:
86
- await self.pool.close()
87
- if self.redis:
88
- await self.redis.close()
89
-
90
  async def execute_query(self, query: str, *args):
91
- """Execute a PostgreSQL query"""
92
- if not self.pool:
93
- raise HTTPException(status_code=503, detail="Database not available")
94
-
95
- async with self.pool.acquire() as connection:
96
- try:
97
- if query.strip().upper().startswith('SELECT'):
98
- return await connection.fetch(query, *args)
99
- else:
100
- return await connection.execute(query, *args)
101
- except Exception as e:
102
- print(f"Database query error: {e}")
103
- raise HTTPException(status_code=500, detail="Database operation failed")
104
 
105
- async def cache_set(self, key: str, value: any, ttl: int = 3600):
106
- """Set cache value with TTL"""
107
- if self.redis:
108
- try:
109
- await self.redis.setex(key, ttl, json.dumps(value))
110
- return True
111
- except Exception as e:
112
- print(f"Cache set error: {e}")
113
- return False
114
 
115
  async def cache_get(self, key: str):
116
- """Get cache value"""
117
- if self.redis:
118
- try:
119
- value = await self.redis.get(key)
120
- return json.loads(value) if value else None
121
- except Exception as e:
122
- print(f"Cache get error: {e}")
123
  return None
124
 
125
  async def cache_delete(self, key: str):
126
- """Delete cache value"""
127
- if self.redis:
128
- try:
129
- await self.redis.delete(key)
130
- return True
131
- except Exception as e:
132
- print(f"Cache delete error: {e}")
133
- return False
134
 
135
  # Initialize database manager
136
- db_manager = DatabaseManager()
137
 
138
- # Cloudflare Worker class (keep your existing implementation)
139
- class CloudflareWorker:
140
- def __init__(self, cloudflare_api_key: str, api_base_url: str, llm_model_name: str, max_tokens: int = 1024):
141
- self.cloudflare_api_key = cloudflare_api_key
142
- self.api_base_url = api_base_url
143
- self.llm_model_name = llm_model_name
144
- self.max_tokens = max_tokens
145
-
146
- async def _send_request(self, model_name: str, input_data: dict) -> str:
147
- headers = {"Authorization": f"Bearer {self.cloudflare_api_key}"}
148
- url = f"{self.api_base_url}{model_name}"
149
-
150
- async with aiohttp.ClientSession() as session:
151
- async with session.post(url, headers=headers, json=input_data) as response:
152
- if response.status == 200:
153
- result = await response.json()
154
- return result["result"]["response"]
155
- else:
156
- error_text = await response.text()
157
- raise HTTPException(status_code=response.status, detail=f"Cloudflare API error: {error_text}")
158
-
159
- async def query(self, prompt: str, system_prompt: str = '') -> str:
160
- message = [
161
- {"role": "system", "content": system_prompt},
162
- {"role": "user", "content": prompt}
163
- ]
164
-
165
- input_ = {
166
- "messages": message,
167
- "max_tokens": self.max_tokens,
168
- }
169
-
170
- result = await self._send_request(self.llm_model_name, input_)
171
- return result
172
-
173
- # User Management with Database
174
- class UserManager:
175
- @staticmethod
176
- def hash_email(email: str) -> str:
177
- return hashlib.md5(email.encode()).hexdigest()[:12]
178
-
179
- @staticmethod
180
- async def create_user(email: str, name: str) -> dict:
181
- hashed_email = UserManager.hash_email(email)
182
- user_id = str(uuid.uuid4())
183
-
184
- # Check if user exists
185
- existing_query = "SELECT id FROM users WHERE email = $1"
186
- existing = await db_manager.execute_query(existing_query, email)
187
- if existing:
188
- raise HTTPException(status_code=400, detail="User already exists")
189
-
190
- # Create user
191
- query = """
192
- INSERT INTO users (id, email, name, hashed_email, created_at, updated_at)
193
- VALUES ($1, $2, $3, $4, $5, $6)
194
- RETURNING id, email, name, created_at
195
- """
196
-
197
- now = datetime.now()
198
- result = await db_manager.execute_query(
199
- query, user_id, email, name, hashed_email, now, now
200
- )
201
-
202
- user = {
203
- "id": user_id,
204
- "email": email,
205
- "name": name,
206
- "created_at": now.isoformat()
207
- }
208
-
209
- # Cache user
210
- await db_manager.cache_set(f"user:{user_id}", user, 3600)
211
- await db_manager.cache_set(f"user:email:{email}", user, 3600)
212
-
213
- return user
214
-
215
- @staticmethod
216
- async def get_user_by_email(email: str) -> Optional[dict]:
217
- # Try cache first
218
- cached = await db_manager.cache_get(f"user:email:{email}")
219
- if cached:
220
- return cached
221
-
222
- # Query database
223
- query = "SELECT id, email, name, created_at, hashed_email FROM users WHERE email = $1 AND is_active = true"
224
- result = await db_manager.execute_query(query, email)
225
-
226
- if result:
227
- user_row = result[0]
228
- user = {
229
- "id": user_row['id'],
230
- "email": user_row['email'],
231
- "name": user_row['name'],
232
- "created_at": user_row['created_at'].isoformat(),
233
- "hashed_email": user_row['hashed_email']
234
- }
235
-
236
- # Cache user
237
- await db_manager.cache_set(f"user:email:{email}", user, 3600)
238
- await db_manager.cache_set(f"user:{user['id']}", user, 3600)
239
-
240
- return user
241
- return None
242
-
243
- @staticmethod
244
- async def get_user_by_id(user_id: str) -> Optional[dict]:
245
- # Try cache first
246
- cached = await db_manager.cache_get(f"user:{user_id}")
247
- if cached:
248
- return cached
249
-
250
- # Query database
251
- query = "SELECT id, email, name, created_at FROM users WHERE id = $1 AND is_active = true"
252
- result = await db_manager.execute_query(query, user_id)
253
-
254
- if result:
255
- user_row = result[0]
256
- user = {
257
- "id": user_row['id'],
258
- "email": user_row['email'],
259
- "name": user_row['name'],
260
- "created_at": user_row['created_at'].isoformat()
261
- }
262
-
263
- # Cache user
264
- await db_manager.cache_set(f"user:{user_id}", user, 3600)
265
-
266
- return user
267
- return None
268
 
269
- # Conversation Management
270
  class ConversationManager:
271
  @staticmethod
272
  async def create_conversation(user_id: str, ai_type: str, ai_id: Optional[str] = None, title: Optional[str] = None) -> str:
273
  conversation_id = str(uuid.uuid4())
274
- query = """
275
- INSERT INTO conversations (id, user_id, ai_type, ai_id, title, created_at, updated_at)
276
- VALUES ($1, $2, $3, $4, $5, $6, $7)
277
- RETURNING id
278
- """
279
-
280
- now = datetime.now()
281
- await db_manager.execute_query(
282
- query, conversation_id, user_id, ai_type, ai_id, title or f"{ai_type} conversation", now, now
283
- )
284
-
285
- # Invalidate user conversations cache
286
- await db_manager.cache_delete(f"user:{user_id}:conversations")
287
-
288
  return conversation_id
289
 
290
  @staticmethod
291
  async def add_message(conversation_id: str, role: str, content: str, metadata: Optional[dict] = None) -> str:
292
  message_id = str(uuid.uuid4())
293
- query = """
294
- INSERT INTO messages (id, conversation_id, role, content, metadata, created_at)
295
- VALUES ($1, $2, $3, $4, $5, $6)
296
- RETURNING id
297
- """
298
-
299
- await db_manager.execute_query(
300
- query, message_id, conversation_id, role, content,
301
- json.dumps(metadata or {}), datetime.now()
302
- )
303
-
304
- # Update conversation timestamp
305
- update_query = "UPDATE conversations SET updated_at = $1 WHERE id = $2"
306
- await db_manager.execute_query(update_query, datetime.now(), conversation_id)
307
-
308
  return message_id
309
 
310
  @staticmethod
311
  async def get_conversation_messages(conversation_id: str, user_id: str) -> List[dict]:
312
- query = """
313
- SELECT m.id, m.role, m.content, m.metadata, m.created_at
314
- FROM messages m
315
- JOIN conversations c ON m.conversation_id = c.id
316
- WHERE c.id = $1 AND c.user_id = $2 AND c.is_active = true
317
- ORDER BY m.created_at ASC
318
- """
319
-
320
- result = await db_manager.execute_query(query, conversation_id, user_id)
321
-
322
- messages = []
323
- for row in result:
324
- messages.append({
325
- "id": row['id'],
326
- "role": row['role'],
327
- "content": row['content'],
328
- "metadata": json.loads(row['metadata']) if row['metadata'] else {},
329
- "created_at": row['created_at'].isoformat()
330
- })
331
-
332
- return messages
333
 
334
- # Custom AI Management
335
  class CustomAIManager:
336
  @staticmethod
337
  async def create_custom_ai(user_id: str, name: str, description: str, knowledge_files: List[dict]) -> str:
338
  ai_id = str(uuid.uuid4())
339
- query = """
340
- INSERT INTO custom_ais (id, user_id, name, description, knowledge_files, chunks_count, created_at, updated_at)
341
- VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
342
- RETURNING id
343
- """
344
-
345
- now = datetime.now()
346
- await db_manager.execute_query(
347
- query, ai_id, user_id, name, description,
348
- json.dumps(knowledge_files), len(knowledge_files), now, now
349
- )
350
-
351
- # Invalidate user cache
352
- await db_manager.cache_delete(f"user:{user_id}:ais")
353
-
354
  return ai_id
355
 
356
- @staticmethod
357
- async def get_user_ais(user_id: str) -> List[dict]:
358
- # Try cache first
359
- cached = await db_manager.cache_get(f"user:{user_id}:ais")
360
- if cached:
361
- return cached
362
-
363
- query = """
364
- SELECT id, name, description, knowledge_files, chunks_count, created_at, updated_at
365
- FROM custom_ais
366
- WHERE user_id = $1 AND is_active = true
367
- ORDER BY created_at DESC
368
- """
369
-
370
- result = await db_manager.execute_query(query, user_id)
371
-
372
- ais = []
373
- for row in result:
374
- ais.append({
375
- "id": row['id'],
376
- "name": row['name'],
377
- "description": row['description'],
378
- "knowledge_files": json.loads(row['knowledge_files']) if row['knowledge_files'] else [],
379
- "chunks_count": row['chunks_count'],
380
- "created_at": row['created_at'].isoformat(),
381
- "updated_at": row['updated_at'].isoformat()
382
- })
383
-
384
- # Cache for 30 minutes
385
- await db_manager.cache_set(f"user:{user_id}:ais", ais, 1800)
386
-
387
- return ais
388
-
389
  @staticmethod
390
  async def get_ai_by_id(ai_id: str, user_id: str) -> Optional[dict]:
391
- query = """
392
- SELECT id, name, description, knowledge_files, created_at
393
- FROM custom_ais
394
- WHERE id = $1 AND user_id = $2 AND is_active = true
395
- """
396
-
397
- result = await db_manager.execute_query(query, ai_id, user_id)
398
-
399
- if result:
400
- row = result[0]
401
- return {
402
- "id": row['id'],
403
- "name": row['name'],
404
- "description": row['description'],
405
- "knowledge_files": json.loads(row['knowledge_files']) if row['knowledge_files'] else [],
406
- "created_at": row['created_at'].isoformat()
407
- }
408
- return None
409
-
410
- # Simple knowledge store (keep your existing implementation)
411
- class SimpleKnowledgeStore:
412
- def __init__(self, data_dir: str):
413
- self.data_dir = data_dir
414
- self.chunks = []
415
- self.entities = []
416
- self.load_data()
417
 
418
- def load_data(self):
419
- try:
420
- chunks_file = Path(self.data_dir) / "kv_store_text_chunks.json"
421
- if chunks_file.exists():
422
- with open(chunks_file, 'r', encoding='utf-8') as f:
423
- data = json.load(f)
424
- self.chunks = list(data.values()) if data else []
425
-
426
- knowledge_file = Path(self.data_dir) / "knowledge.json"
427
- if knowledge_file.exists():
428
- with open(knowledge_file, 'r', encoding='utf-8') as f:
429
- data = json.load(f)
430
- if 'chunks' in data:
431
- self.chunks = data['chunks']
432
- except Exception as e:
433
- print(f"Error loading knowledge store: {e}")
434
-
435
- def search(self, query: str, limit: int = 3) -> List[str]:
436
- if not self.chunks:
437
- return []
438
-
439
- query_lower = query.lower()
440
- scored_chunks = []
441
-
442
- for chunk in self.chunks:
443
- if isinstance(chunk, str):
444
- score = chunk.lower().count(query_lower)
445
- if score > 0:
446
- scored_chunks.append((score, chunk))
447
-
448
- scored_chunks.sort(key=lambda x: x[0], reverse=True)
449
- return [chunk for _, chunk in scored_chunks[:limit]]
450
-
451
- # Multi-user knowledge manager
452
- class MultiUserKnowledgeManager:
453
- def __init__(self, base_dir: str):
454
- self.base_dir = Path(base_dir)
455
- self.user_stores = {}
456
-
457
- def get_user_store(self, user_id: str, ai_id: str = "default") -> SimpleKnowledgeStore:
458
- store_key = f"{user_id}_{ai_id}"
459
- if store_key not in self.user_stores:
460
- user_dir = self.base_dir / f"user_{user_id}" / f"ai_{ai_id}"
461
- user_dir.mkdir(parents=True, exist_ok=True)
462
- self.user_stores[store_key] = SimpleKnowledgeStore(str(user_dir))
463
- return self.user_stores[store_key]
464
-
465
- def create_custom_ai(self, user_id: str, ai_name: str, uploaded_files: List[str]) -> str:
466
- ai_id = str(uuid.uuid4())
467
- ai_dir = self.base_dir / f"user_{user_id}" / f"ai_{ai_id}"
468
- ai_dir.mkdir(parents=True, exist_ok=True)
469
-
470
- knowledge_chunks = []
471
- for file_path in uploaded_files:
472
- if Path(file_path).exists():
473
- try:
474
- content = Path(file_path).read_text(encoding='utf-8', errors='ignore')
475
- paragraphs = content.split('\n\n')
476
- for para in paragraphs:
477
- if para.strip():
478
- sentences = para.split('. ')
479
- if len(sentences) > 3:
480
- for i in range(0, len(sentences), 3):
481
- chunk = '. '.join(sentences[i:i+3])
482
- if chunk.strip():
483
- knowledge_chunks.append(chunk.strip())
484
- else:
485
- knowledge_chunks.append(para.strip())
486
- except Exception as e:
487
- print(f"Error processing {file_path}: {e}")
488
-
489
- knowledge_file = ai_dir / "knowledge.json"
490
- with open(knowledge_file, 'w', encoding='utf-8') as f:
491
- json.dump({
492
- "ai_id": ai_id,
493
- "name": ai_name,
494
- "chunks": knowledge_chunks,
495
- "created_at": datetime.now().isoformat()
496
- }, f, ensure_ascii=False, indent=2)
497
-
498
- self.user_stores[f"{user_id}_{ai_id}"] = SimpleKnowledgeStore(str(ai_dir))
499
-
500
- return ai_id
501
-
502
- # Configuration
503
- CLOUDFLARE_API_KEY = os.getenv('CLOUDFLARE_API_KEY', 'lMbDDfHi887AK243ZUenm4dHV2nwEx2NSmX6xuq5')
504
- API_BASE_URL = "https://api.cloudflare.com/client/v4/accounts/07c4bcfbc1891c3e528e1c439fee68bd/ai/run/"
505
- LLM_MODEL = "@cf/meta/llama-3.2-3b-instruct"
506
- WORKING_DIR = "./dickens"
507
- USER_DATA_DIR = "./user_data"
508
- JWT_SECRET = os.getenv('JWT_SECRET', 'abd3d1ba8fe8982ea3390b8851427c49')
509
-
510
- # Global instances
511
- cloudflare_worker = None
512
- fire_safety_store = None
513
- user_knowledge_manager = None
514
-
515
- # JWT helper functions
516
- def create_jwt_token(user_data: dict) -> str:
517
- payload = {
518
- "user_id": user_data["id"],
519
- "email": user_data["email"],
520
- "exp": datetime.utcnow() + timedelta(days=7)
521
- }
522
- return jwt.encode(payload, JWT_SECRET, algorithm="HS256")
523
-
524
- def verify_jwt_token(token: str) -> dict:
525
- try:
526
- payload = jwt.decode(token, JWT_SECRET, algorithms=["HS256"])
527
- return payload
528
- except jwt.ExpiredSignatureError:
529
- raise HTTPException(status_code=401, detail="Token expired")
530
- except jwt.InvalidTokenError:
531
- raise HTTPException(status_code=401, detail="Invalid token")
532
-
533
- # Security
534
- security = HTTPBearer()
535
 
536
- async def get_current_user(credentials: HTTPAuthorizationCredentials = Depends(security)):
537
- token = credentials.credentials
538
- payload = verify_jwt_token(token)
539
- user_id = payload["user_id"]
540
-
541
- user = await UserManager.get_user_by_id(user_id)
542
- if not user:
543
- raise HTTPException(status_code=401, detail="User not found")
544
 
545
- return user
 
 
 
546
 
547
- # Initialize system
548
- async def initialize_system():
549
- global cloudflare_worker, fire_safety_store, user_knowledge_manager
550
-
551
- print("🔄 Initializing YourAI System...")
552
 
553
- # Connect to databases
554
- await db_manager.connect()
555
 
556
  # Initialize Cloudflare worker
557
  cloudflare_worker = CloudflareWorker(
558
  cloudflare_api_key=CLOUDFLARE_API_KEY,
559
  api_base_url=API_BASE_URL,
560
  llm_model_name=LLM_MODEL,
 
561
  )
562
 
563
- # Initialize fire safety knowledge store
564
- dickens_path = Path(WORKING_DIR)
565
- has_data = dickens_path.exists() and len(list(dickens_path.glob("*.json"))) > 0
566
 
567
- if not has_data:
568
- print("📥 Downloading RAG database...")
569
- try:
570
- data_url = "https://github.com/YOUR_USERNAME/fire-safety-ai/releases/download/v1.0-data/dickens.zip"
571
-
572
- print(f"Downloading from: {data_url}")
573
- response = requests.get(data_url, timeout=60)
574
- response.raise_for_status()
575
-
576
- with open("dickens.zip", "wb") as f:
577
- f.write(response.content)
578
-
579
- with zipfile.ZipFile("dickens.zip", 'r') as zip_ref:
580
- zip_ref.extractall(".")
581
-
582
- os.remove("dickens.zip")
583
- print("Data downloaded!")
584
-
585
- except Exception as e:
586
- print(f"⚠️ Download failed: {e}")
587
- os.makedirs(WORKING_DIR, exist_ok=True)
588
-
589
- fire_safety_store = SimpleKnowledgeStore(WORKING_DIR)
590
- user_knowledge_manager = MultiUserKnowledgeManager(USER_DATA_DIR)
591
-
592
- print("✅ YourAI System ready!")
593
 
594
- # Lifespan context manager
595
- @asynccontextmanager
596
- async def lifespan(app: FastAPI):
597
- # Startup
598
- await initialize_system()
599
- yield
600
- # Shutdown
601
- await db_manager.close()
602
 
603
- # Initialize FastAPI with lifespan
604
- app = FastAPI(
605
- title="YourAI Multi-Model API",
606
- version="2.0.0",
607
- lifespan=lifespan
608
- )
609
-
610
- # Enable CORS
611
- app.add_middleware(
612
- CORSMiddleware,
613
- allow_origins=["*"],
614
- allow_credentials=True,
615
- allow_methods=["*"],
616
- allow_headers=["*"],
617
- )
618
-
619
- # API Endpoints
620
  @app.get("/")
621
  async def root():
622
- return {"message": "YourAI Multi-Model API", "status": "running", "version": "2.0.0"}
623
 
 
624
  @app.get("/health")
625
  async def health_check():
626
- # Get stats from database
627
- try:
628
- stats_query = "SELECT COUNT(*) as count FROM users WHERE is_active = true"
629
- user_count_result = await db_manager.execute_query(stats_query)
630
- user_count = user_count_result[0]['count'] if user_count_result else 0
631
-
632
- ais_query = "SELECT COUNT(*) as count FROM custom_ais WHERE is_active = true"
633
- ais_count_result = await db_manager.execute_query(ais_query)
634
- ais_count = ais_count_result[0]['count'] if ais_count_result else 0
635
- except Exception:
636
- user_count = 0
637
- ais_count = 0
638
-
639
  return {
640
  "status": "healthy",
641
- "models": ["fire-safety", "general", "physics", "custom"],
642
- "users_count": user_count,
643
- "active_custom_ais": ais_count,
644
- "fire_safety_chunks": len(fire_safety_store.chunks) if fire_safety_store else 0,
645
- "database_connected": db_manager.pool is not None,
646
- "cache_connected": db_manager.redis is not None
647
  }
648
 
649
- # Authentication endpoints
650
- @app.post("/auth/register")
651
- async def register_user(user_data: UserRegister):
652
  try:
653
- user = await UserManager.create_user(user_data.email, user_data.name)
654
- token = create_jwt_token(user)
655
-
656
- return {
657
- "user": user,
658
- "token": token,
659
- "message": "User registered successfully"
660
- }
661
- except HTTPException:
662
- raise
663
  except Exception as e:
664
- raise HTTPException(status_code=500, detail=str(e))
665
-
666
- @app.post("/auth/login")
667
- async def login_user(login_data: UserLogin):
668
- user = await UserManager.get_user_by_email(login_data.email)
669
- if not user:
670
- raise HTTPException(status_code=404, detail="User not found")
671
-
672
- token = create_jwt_token(user)
673
-
674
- return {
675
- "user": user,
676
- "token": token,
677
- "message": "Login successful"
678
- }
679
 
680
- # File upload for custom AI
681
- @app.post("/upload-files", response_model=List[FileUploadResponse])
682
- async def upload_files(
683
- files: List[UploadFile] = File(...),
684
- current_user: dict = Depends(get_current_user)
685
- ):
686
- user_id = current_user["id"]
687
- user_upload_dir = Path(USER_DATA_DIR) / f"user_{user_id}" / "uploads"
688
- user_upload_dir.mkdir(parents=True, exist_ok=True)
689
-
690
- uploaded_files = []
691
-
692
- for file in files:
693
- if not file.filename:
694
- continue
695
-
696
- allowed_extensions = ['.txt', '.md', '.pdf', '.doc', '.docx']
697
- file_ext = Path(file.filename).suffix.lower()
698
-
699
- if file_ext not in allowed_extensions:
700
- raise HTTPException(
701
- status_code=400,
702
- detail=f"File type {file_ext} not supported. Allowed: {allowed_extensions}"
703
- )
704
-
705
- file_path = user_upload_dir / file.filename
706
- with open(file_path, "wb") as buffer:
707
- shutil.copyfileobj(file.file, buffer)
708
-
709
- uploaded_files.append(FileUploadResponse(
710
- filename=file.filename,
711
- size=file_path.stat().st_size,
712
- message="Uploaded successfully"
713
- ))
714
-
715
- return uploaded_files
716
-
717
- # Create custom AI
718
- @app.post("/create-custom-ai")
719
- async def create_custom_ai(
720
- ai_data: CustomAIRequest,
721
- current_user: dict = Depends(get_current_user)
722
- ):
723
- user_id = current_user["id"]
724
- user_upload_dir = Path(USER_DATA_DIR) / f"user_{user_id}" / "uploads"
725
-
726
- if not user_upload_dir.exists() or not list(user_upload_dir.glob("*")):
727
- raise HTTPException(status_code=400, detail="No files uploaded. Please upload knowledge files first.")
728
-
729
- uploaded_files = [str(f) for f in user_upload_dir.glob("*") if f.is_file()]
730
-
731
- # Create knowledge store
732
- ai_id = user_knowledge_manager.create_custom_ai(user_id, ai_data.name, uploaded_files)
733
-
734
- # Store in database
735
- knowledge_files_metadata = [{"filename": Path(f).name, "size": Path(f).stat().st_size} for f in uploaded_files]
736
- db_ai_id = await CustomAIManager.create_custom_ai(user_id, ai_data.name, ai_data.description, knowledge_files_metadata)
737
-
738
- ai_info = {
739
- "id": ai_id,
740
- "name": ai_data.name,
741
- "description": ai_data.description,
742
- "created_at": datetime.now().isoformat(),
743
- "files_count": len(uploaded_files)
744
- }
745
-
746
- return {
747
- "ai_id": ai_id,
748
- "message": "Custom AI created successfully",
749
- "ai_info": ai_info
750
- }
751
-
752
- # Get user's custom AIs
753
- @app.get("/my-ais")
754
- async def get_user_ais(current_user: dict = Depends(get_current_user)):
755
- user_id = current_user["id"]
756
- ais = await CustomAIManager.get_user_ais(user_id)
757
-
758
- return {
759
- "ais": ais,
760
- "count": len(ais)
761
- }
762
 
763
- # Chat endpoints
764
  @app.post("/chat/fire-safety", response_model=QuestionResponse)
765
  async def chat_fire_safety(request: QuestionRequest, current_user: dict = Depends(get_current_user)):
766
- if not cloudflare_worker or not fire_safety_store:
767
- raise HTTPException(status_code=503, detail="System not initialized")
768
 
769
  user_id = current_user["id"]
770
 
@@ -774,31 +251,41 @@ async def chat_fire_safety(request: QuestionRequest, current_user: dict = Depend
774
  if not conversation_id:
775
  conversation_id = await ConversationManager.create_conversation(user_id, "fire-safety")
776
 
777
- # Add user message
778
  await ConversationManager.add_message(conversation_id, "user", request.question)
779
 
780
- # Search for relevant context
781
- relevant_chunks = fire_safety_store.search(request.question, limit=3)
782
- context = "\n".join(relevant_chunks) if relevant_chunks else "No specific context found."
783
 
784
- system_prompt = """You are a Fire Safety AI Assistant specializing in fire safety regulations.
785
- Use the provided context to answer questions about building codes, emergency exits, and fire safety requirements."""
786
-
787
- user_prompt = f"""Context: {context}
788
-
789
- Question: {request.question}
790
-
791
- Please provide a helpful answer based on the context about fire safety regulations."""
792
 
793
- response = await cloudflare_worker.query(user_prompt, system_prompt)

- # Add assistant response
796
- await ConversationManager.add_message(conversation_id, "assistant", response, {"mode": request.mode, "context_used": len(relevant_chunks)})
 
 
 
 
797
 
798
- return QuestionResponse(answer=response, mode=request.mode, status="success", conversation_id=conversation_id)
799
  except Exception as e:
 
800
  raise HTTPException(status_code=500, detail=f"Error: {str(e)}")
801
 
 
802
  @app.post("/chat/general", response_model=QuestionResponse)
803
  async def chat_general(request: QuestionRequest, current_user: dict = Depends(get_current_user)):
804
  if not cloudflare_worker:
@@ -815,25 +302,57 @@ async def chat_general(request: QuestionRequest, current_user: dict = Depends(ge
815
  # Add user message
816
  await ConversationManager.add_message(conversation_id, "user", request.question)
817
 
818
- system_prompt = """You are a helpful general AI assistant. Provide accurate, helpful, and engaging responses to user questions."""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
819
 
820
  response = await cloudflare_worker.query(request.question, system_prompt)
821
 
822
  # Add assistant response
823
- await ConversationManager.add_message(conversation_id, "assistant", response, {"mode": request.mode})

- return QuestionResponse(answer=response, mode=request.mode, status="success", conversation_id=conversation_id)
826
  except Exception as e:
 
827
  raise HTTPException(status_code=500, detail=f"Error: {str(e)}")
828
 
 
829
  @app.post("/chat/custom/{ai_id}", response_model=QuestionResponse)
830
  async def chat_custom_ai(
831
  ai_id: str,
832
  request: QuestionRequest,
833
  current_user: dict = Depends(get_current_user)
834
  ):
835
- if not cloudflare_worker or not user_knowledge_manager:
836
- raise HTTPException(status_code=503, detail="System not initialized")
837
 
838
  user_id = current_user["id"]
839
 
@@ -851,30 +370,167 @@ async def chat_custom_ai(
851
  # Add user message
852
  await ConversationManager.add_message(conversation_id, "user", request.question)
853
 
854
- # Get the knowledge store for this custom AI
855
- custom_store = user_knowledge_manager.get_user_store(user_id, ai_id)
856
 
857
- # Search for relevant context
858
- relevant_chunks = custom_store.search(request.question, limit=3)
859
- context = "\n".join(relevant_chunks) if relevant_chunks else "No specific context found."
 
 
 
 
 
860
 
861
- system_prompt = f"""You are {ai_info['name']}, a custom AI assistant. {ai_info.get('description', '')}
862
- Use the provided context from the uploaded knowledge base to answer questions accurately."""

- user_prompt = f"""Context: {context}
 
 
 
 
 
 
 
 
 
865
 
866
- Question: {request.question}

- Please provide a helpful answer based on the uploaded knowledge base."""

- response = await cloudflare_worker.query(user_prompt, system_prompt)

- # Add assistant response
873
- await ConversationManager.add_message(conversation_id, "assistant", response, {"mode": request.mode, "ai_name": ai_info['name'], "context_used": len(relevant_chunks)})
 
 
 
874
 
875
- return QuestionResponse(answer=response, mode=request.mode, status="success", conversation_id=conversation_id)
876
  except Exception as e:
877
- raise HTTPException(status_code=500, detail=f"Error: {str(e)}")
 
 
 
 
 
 
 
 
 
 
 
 
878
 
879
  # Get user conversations
880
  @app.get("/conversations")
@@ -887,44 +543,15 @@ async def get_conversations(current_user: dict = Depends(get_current_user)):
887
  return {"conversations": cached}
888
 
889
  try:
890
- query = """
891
- SELECT c.id, c.ai_type, c.ai_id, c.title, c.created_at, c.updated_at,
892
- m.content as last_message, m.role as last_message_role,
893
- ca.name as ai_name
894
- FROM conversations c
895
- LEFT JOIN LATERAL (
896
- SELECT content, role FROM messages
897
- WHERE conversation_id = c.id
898
- ORDER BY created_at DESC LIMIT 1
899
- ) m ON true
900
- LEFT JOIN custom_ais ca ON c.ai_id = ca.id
901
- WHERE c.user_id = $1 AND c.is_active = true
902
- ORDER BY c.updated_at DESC
903
- LIMIT 50
904
- """
905
-
906
- result = await db_manager.execute_query(query, user_id)
907
-
908
  conversations = []
909
- for row in result:
910
- conversations.append({
911
- "id": row['id'],
912
- "ai_type": row['ai_type'],
913
- "ai_id": row['ai_id'],
914
- "title": row['title'],
915
- "last_message": row['last_message'],
916
- "last_message_role": row['last_message_role'],
917
- "ai_name": row['ai_name'],
918
- "created_at": row['created_at'].isoformat() if row['created_at'] else None,
919
- "updated_at": row['updated_at'].isoformat() if row['updated_at'] else None
920
- })
921
 
922
  # Cache for 15 minutes
923
  await db_manager.cache_set(f"user:{user_id}:conversations", conversations, 900)
924
 
925
  return {"conversations": conversations}
926
  except Exception as e:
927
- print(f"Error getting conversations: {e}")
928
  return {"conversations": []}
929
 
930
  # Get specific conversation messages
@@ -936,7 +563,7 @@ async def get_conversation_messages(conversation_id: str, current_user: dict = D
936
  messages = await ConversationManager.get_conversation_messages(conversation_id, user_id)
937
  return {"messages": messages}
938
  except Exception as e:
939
- print(f"Error getting conversation messages: {e}")
940
  return {"messages": []}
941
 
942
  # Delete conversation
@@ -945,17 +572,48 @@ async def delete_conversation(conversation_id: str, current_user: dict = Depends
945
  user_id = current_user["id"]
946
 
947
  try:
948
- query = "UPDATE conversations SET is_active = false WHERE id = $1 AND user_id = $2"
949
- await db_manager.execute_query(query, conversation_id, user_id)
 
950
 
951
  # Invalidate cache
952
  await db_manager.cache_delete(f"user:{user_id}:conversations")
953
 
954
  return {"message": "Conversation deleted successfully"}
955
  except Exception as e:
956
- print(f"Error deleting conversation: {e}")
957
  raise HTTPException(status_code=500, detail="Failed to delete conversation")
  # Legacy endpoints for backward compatibility
960
  @app.post("/ask", response_model=QuestionResponse)
961
  async def ask_legacy(request: QuestionRequest, current_user: dict = Depends(get_current_user)):
@@ -965,7 +623,7 @@ async def ask_legacy(request: QuestionRequest, current_user: dict = Depends(get_
965
  @app.get("/modes")
966
  async def get_modes():
967
  return {
968
- "modes": ["hybrid", "context", "direct"],
969
  "default": "hybrid"
970
  }
971
 
@@ -975,162 +633,78 @@ async def get_examples():
975
  "fire_safety": [
976
  "What are the fire exit requirements for a commercial building?",
977
  "How many fire extinguishers are needed in an office space?",
978
- "What is the maximum travel distance to an exit?"
 
 
979
  ],
980
  "general": [
981
- "What is artificial intelligence?",
982
- "How does machine learning work?",
983
- "Explain the concept of neural networks"
 
 
984
  ]
985
  }
986
 
987
- # Admin endpoints (for system monitoring)
988
- @app.get("/admin/stats")
989
- async def get_admin_stats(current_user: dict = Depends(get_current_user)):
990
- # Simple admin check - in production, implement proper role-based access
991
- if current_user["email"] != "admin@yourai.com": # Change this to your admin email
992
- raise HTTPException(status_code=403, detail="Admin access required")
993
-
994
- try:
995
- # Get system statistics
996
- users_query = "SELECT COUNT(*) as count FROM users WHERE is_active = true"
997
- ais_query = "SELECT COUNT(*) as count FROM custom_ais WHERE is_active = true"
998
- conversations_query = "SELECT COUNT(*) as count FROM conversations WHERE is_active = true"
999
- messages_query = "SELECT COUNT(*) as count FROM messages"
1000
-
1001
- users_result = await db_manager.execute_query(users_query)
1002
- ais_result = await db_manager.execute_query(ais_query)
1003
- conversations_result = await db_manager.execute_query(conversations_query)
1004
- messages_result = await db_manager.execute_query(messages_query)
1005
-
1006
- return {
1007
- "total_users": users_result[0]['count'] if users_result else 0,
1008
- "total_custom_ais": ais_result[0]['count'] if ais_result else 0,
1009
- "total_conversations": conversations_result[0]['count'] if conversations_result else 0,
1010
- "total_messages": messages_result[0]['count'] if messages_result else 0,
1011
- "fire_safety_chunks": len(fire_safety_store.chunks) if fire_safety_store else 0,
1012
- "system_status": "healthy"
1013
- }
1014
- except Exception as e:
1015
- print(f"Error getting admin stats: {e}")
1016
- return {
1017
- "total_users": 0,
1018
- "total_custom_ais": 0,
1019
- "total_conversations": 0,
1020
- "total_messages": 0,
1021
- "fire_safety_chunks": 0,
1022
- "system_status": "error",
1023
- "error": str(e)
1024
  }
 
1025
 
1026
- # Cleanup endpoint for expired sessions
1027
- @app.post("/admin/cleanup")
1028
- async def cleanup_expired_data():
1029
- try:
1030
- # Clean up expired sessions
1031
- cleanup_query = "UPDATE user_sessions SET is_active = false WHERE expires_at < NOW() AND is_active = true"
1032
- await db_manager.execute_query(cleanup_query)
1033
-
1034
- return {"message": "Cleanup completed successfully"}
1035
- except Exception as e:
1036
- print(f"Error during cleanup: {e}")
1037
- return {"message": "Cleanup completed with some errors", "error": str(e)}
1038
-
1039
- # Rate limiting endpoint
1040
- @app.get("/rate-limit/{user_id}")
1041
- async def check_rate_limit(user_id: str, current_user: dict = Depends(get_current_user)):
1042
- # Only allow users to check their own rate limit or admin
1043
- if current_user["id"] != user_id and current_user["email"] != "admin@yourai.com":
1044
- raise HTTPException(status_code=403, detail="Access denied")
1045
-
1046
- try:
1047
- # Simple rate limiting: 100 requests per hour
1048
- key = f"rate_limit:{user_id}:{datetime.now().strftime('%Y-%m-%d-%H')}"
1049
-
1050
- if db_manager.redis:
1051
- current_count = await db_manager.redis.get(key) or 0
1052
- current_count = int(current_count)
1053
-
1054
- return {
1055
- "user_id": user_id,
1056
- "current_requests": current_count,
1057
- "limit": 100,
1058
- "remaining": max(0, 100 - current_count),
1059
- "reset_time": f"{datetime.now().strftime('%Y-%m-%d %H')}:59:59"
1060
- }
1061
- else:
1062
- return {
1063
- "user_id": user_id,
1064
- "current_requests": 0,
1065
- "limit": 100,
1066
- "remaining": 100,
1067
- "reset_time": f"{datetime.now().strftime('%Y-%m-%d %H')}:59:59",
1068
- "note": "Rate limiting not available (Redis not connected)"
1069
- }
1070
- except Exception as e:
1071
- print(f"Error checking rate limit: {e}")
1072
- raise HTTPException(status_code=500, detail="Rate limit check failed")
1073
-
1074
- # Health check for specific components
1075
- @app.get("/health/detailed")
1076
- async def detailed_health_check():
1077
- health_status = {
1078
- "timestamp": datetime.now().isoformat(),
1079
  "status": "healthy",
1080
- "components": {}
 
 
 
 
 
 
 
 
 
1081
  }
1082
 
1083
- # Check database
1084
- try:
1085
- await db_manager.execute_query("SELECT 1")
1086
- health_status["components"]["database"] = {"status": "healthy", "type": "PostgreSQL"}
1087
- except Exception as e:
1088
- health_status["components"]["database"] = {"status": "unhealthy", "error": str(e), "type": "PostgreSQL"}
1089
- health_status["status"] = "degraded"
1090
-
1091
- # Check Redis cache
1092
- try:
1093
- if db_manager.redis:
1094
- await db_manager.redis.ping()
1095
- health_status["components"]["cache"] = {"status": "healthy", "type": "Redis"}
1096
- else:
1097
- health_status["components"]["cache"] = {"status": "unavailable", "type": "Redis"}
1098
- except Exception as e:
1099
- health_status["components"]["cache"] = {"status": "unhealthy", "error": str(e), "type": "Redis"}
1100
- health_status["status"] = "degraded"
1101
 
1102
- # Check Cloudflare Worker
1103
- try:
1104
- if cloudflare_worker:
1105
- # Simple test query
1106
- test_response = await cloudflare_worker.query("Test", "Respond with 'OK'")
1107
- if "OK" in test_response or len(test_response) > 0:
1108
- health_status["components"]["ai_service"] = {"status": "healthy", "type": "Cloudflare Worker"}
1109
- else:
1110
- health_status["components"]["ai_service"] = {"status": "degraded", "type": "Cloudflare Worker"}
1111
- else:
1112
- health_status["components"]["ai_service"] = {"status": "unavailable", "type": "Cloudflare Worker"}
1113
- health_status["status"] = "degraded"
1114
- except Exception as e:
1115
- health_status["components"]["ai_service"] = {"status": "unhealthy", "error": str(e), "type": "Cloudflare Worker"}
1116
- health_status["status"] = "degraded"
1117
-
1118
- # Check fire safety knowledge store
1119
- try:
1120
- if fire_safety_store and len(fire_safety_store.chunks) > 0:
1121
- health_status["components"]["knowledge_store"] = {
1122
- "status": "healthy",
1123
- "type": "Fire Safety KB",
1124
- "chunks_loaded": len(fire_safety_store.chunks)
1125
- }
1126
- else:
1127
- health_status["components"]["knowledge_store"] = {"status": "degraded", "type": "Fire Safety KB"}
1128
- health_status["status"] = "degraded"
1129
- except Exception as e:
1130
- health_status["components"]["knowledge_store"] = {"status": "unhealthy", "error": str(e), "type": "Fire Safety KB"}
1131
- health_status["status"] = "degraded"
1132
-
1133
- return health_status
1134
 
1135
  if __name__ == "__main__":
1136
  import uvicorn
 
1
+ import asyncio
2
  import os
3
  import json
4
+ import uuid
5
  import hashlib
6
  import shutil
7
+ import logging
8
+ from datetime import datetime
 
 
9
  from pathlib import Path
10
+ from typing import List, Optional, Dict, Any
11
 
12
+ from fastapi import FastAPI, HTTPException, Depends, UploadFile, File, Form
 
13
  from fastapi.middleware.cors import CORSMiddleware
14
+ from fastapi.responses import JSONResponse
15
  from pydantic import BaseModel, EmailStr
 
 
16
 
17
+ # Import LightRAG manager
18
+ from lightrag_manager import (
19
+ CloudflareWorker,
20
+ LightRAGManager,
21
+ initialize_lightrag_manager,
22
+ get_lightrag_manager
23
+ )
24
 
25
+ # Configure logging
26
+ logging.basicConfig(level=logging.INFO)
27
+ logger = logging.getLogger(__name__)
 
28
 
29
+ # Get environment variables from HF Spaces secrets
30
+ CLOUDFLARE_API_KEY = os.getenv("CLOUDFLARE_API_KEY", "")
31
+ CLOUDFLARE_ACCOUNT_ID = os.getenv("CLOUDFLARE_ACCOUNT_ID", "")
32
+ DATABASE_URL = os.getenv("DATABASE_URL", "")
33
+ REDIS_URL = os.getenv("REDIS_URL", "")
34
+ JWT_SECRET_KEY = os.getenv("JWT_SECRET_KEY", "your-secret-key")
35
+
36
+ # Models
37
+ EMBEDDING_MODEL = "@cf/baai/bge-m3"
38
+ LLM_MODEL = "@cf/meta/llama-3.2-3b-instruct"
39
+
40
+ # API Base URL
41
+ API_BASE_URL = f"https://api.cloudflare.com/client/v4/accounts/{CLOUDFLARE_ACCOUNT_ID}/ai/run/"
42
 
43
+ # File upload settings
44
+ USER_DATA_DIR = os.getenv("USER_DATA_DIR", "./user_data")
45
+ MAX_UPLOAD_SIZE = int(os.getenv("MAX_UPLOAD_SIZE", "10485760")) # 10MB
46
+
47
+ # Create FastAPI app
48
+ app = FastAPI(title="YourAI - LightRAG Powered", version="2.0.0")
49
+
50
+ # Add CORS middleware
51
+ app.add_middleware(
52
+ CORSMiddleware,
53
+ allow_origins=["*"],
54
+ allow_credentials=True,
55
+ allow_methods=["*"],
56
+ allow_headers=["*"],
57
+ )
58
+
59
+ # Pydantic models
60
  class QuestionRequest(BaseModel):
61
  question: str
62
+ mode: Optional[str] = "hybrid"
63
  conversation_id: Optional[str] = None
64
 
 
 
 
 
65
  class QuestionResponse(BaseModel):
66
  answer: str
67
  mode: str
68
  status: str
69
  conversation_id: Optional[str] = None
70
 
71
+ class UserRegisterRequest(BaseModel):
72
+ email: EmailStr
73
+ name: str
74
+
75
+ class UserLoginRequest(BaseModel):
76
+ email: EmailStr
77
+
78
+ class CustomAIRequest(BaseModel):
79
+ name: str
80
+ description: str
81
+
82
  class FileUploadResponse(BaseModel):
83
  filename: str
84
  size: int
85
  message: str
86
 
87
+ # Global variables
88
+ cloudflare_worker: Optional[CloudflareWorker] = None
89
+ lightrag_manager: Optional[LightRAGManager] = None
90
+
91
+ # Database Manager (simplified version - you'll need to implement based on your DB)
92
  class DatabaseManager:
93
+ def __init__(self, database_url: str):
94
+ self.database_url = database_url
95
+ # Initialize your database connection here

  async def execute_query(self, query: str, *args):
+ # Implement your database query execution
+ # For now, return empty result
+ return []

+ async def cache_set(self, key: str, value: Any, ttl: int = 3600):
+ # Implement Redis cache set
+ pass

  async def cache_get(self, key: str):
+ # Implement Redis cache get
  return None

  async def cache_delete(self, key: str):
+ # Implement Redis cache delete
+ pass

  # Initialize database manager
115
+ db_manager = DatabaseManager(DATABASE_URL)
116
 
117
+ # Authentication functions (simplified)
118
+ async def get_current_user():
119
+ # Implement your JWT authentication here
120
+ # For now, return a mock user
121
+ return {"id": "demo_user", "email": "demo@example.com", "name": "Demo User"}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
122
 
123
+ # Conversation Manager
124
  class ConversationManager:
125
  @staticmethod
126
  async def create_conversation(user_id: str, ai_type: str, ai_id: Optional[str] = None, title: Optional[str] = None) -> str:
127
  conversation_id = str(uuid.uuid4())
128
+ # Implement database storage
  return conversation_id
130
 
131
  @staticmethod
132
  async def add_message(conversation_id: str, role: str, content: str, metadata: Optional[dict] = None) -> str:
133
  message_id = str(uuid.uuid4())
134
+ # Implement database storage
  return message_id
136
 
137
  @staticmethod
138
  async def get_conversation_messages(conversation_id: str, user_id: str) -> List[dict]:
139
+ # Implement database retrieval
140
+ return []

+ # Custom AI Manager
143
  class CustomAIManager:
144
  @staticmethod
145
  async def create_custom_ai(user_id: str, name: str, description: str, knowledge_files: List[dict]) -> str:
146
  ai_id = str(uuid.uuid4())
147
+ # Implement database storage
  return ai_id
  @staticmethod
151
  async def get_ai_by_id(ai_id: str, user_id: str) -> Optional[dict]:
152
+ # Implement database retrieval
153
+ return {"id": ai_id, "name": "Custom AI", "description": "Test AI"}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
154
 
155
+ @staticmethod
156
+ async def get_user_ais(user_id: str) -> List[dict]:
157
+ # Implement database retrieval
158
+ return []

160
+ # User Manager
161
+ class UserManager:
162
+ @staticmethod
163
+ async def create_user(email: str, name: str) -> dict:
164
+ user_id = str(uuid.uuid4())
165
+ # Implement database storage
166
+ return {"id": user_id, "email": email, "name": name}
 
167
 
168
+ @staticmethod
169
+ async def get_user_by_email(email: str) -> Optional[dict]:
170
+ # Implement database retrieval
171
+ return None
172
 
173
+ # Startup event
174
+ @app.on_event("startup")
175
+ async def startup_event():
176
+ global cloudflare_worker, lightrag_manager
 
177
 
178
+ logger.info("Starting up YourAI with LightRAG integration...")
 
179
 
180
  # Initialize Cloudflare worker
181
  cloudflare_worker = CloudflareWorker(
182
  cloudflare_api_key=CLOUDFLARE_API_KEY,
183
  api_base_url=API_BASE_URL,
184
  llm_model_name=LLM_MODEL,
185
+ embedding_model_name=EMBEDDING_MODEL,
186
  )
187
 
188
+ # Initialize LightRAG manager
189
+ lightrag_manager = await initialize_lightrag_manager(cloudflare_worker)
 
190
 
191
+ # Auto-migrate existing knowledge (fire safety will be handled automatically)
192
+ logger.info("LightRAG system initialized successfully")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
193
 
194
+ # Shutdown event
195
+ @app.on_event("shutdown")
196
+ async def shutdown_event():
197
+ global lightrag_manager
198
+ if lightrag_manager:
199
+ await lightrag_manager.cleanup()
200
+ logger.info("Shutdown complete")
 
201
 
202
+ # Root endpoint
  @app.get("/")
204
  async def root():
205
+ return {"message": "YourAI - LightRAG Powered API", "version": "2.0.0", "status": "running"}
206
 
207
+ # Health check
208
  @app.get("/health")
209
  async def health_check():
  return {
211
  "status": "healthy",
212
+ "lightrag_initialized": lightrag_manager is not None,
213
+ "cloudflare_initialized": cloudflare_worker is not None
 
 
 
 
214
  }
215
 
216
+ # User registration
217
+ @app.post("/register")
218
+ async def register_user(request: UserRegisterRequest):
219
  try:
220
+ user = await UserManager.create_user(request.email, request.name)
221
+ return {"message": "User created successfully", "user": user}
 
 
 
 
 
 
 
 
222
  except Exception as e:
223
+ raise HTTPException(status_code=500, detail=f"Registration failed: {str(e)}")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
224
 
225
+ # User login
226
+ @app.post("/login")
227
+ async def login_user(request: UserLoginRequest):
228
+ try:
229
+ user = await UserManager.get_user_by_email(request.email)
230
+ if not user:
231
+ # Auto-create user for demo
232
+ user = await UserManager.create_user(request.email, "Demo User")
233
+
234
+ # Generate JWT token (implement your JWT logic)
235
+ token = "demo_token"
236
+ return {"token": token, "user": user}
237
+ except Exception as e:
238
+ raise HTTPException(status_code=500, detail=f"Login failed: {str(e)}")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
239
 
240
+ # Fire Safety Chat - with LightRAG
241
  @app.post("/chat/fire-safety", response_model=QuestionResponse)
242
  async def chat_fire_safety(request: QuestionRequest, current_user: dict = Depends(get_current_user)):
243
+ if not lightrag_manager:
244
+ raise HTTPException(status_code=503, detail="LightRAG system not initialized")
245
 
246
  user_id = current_user["id"]
247
 
 
251
  if not conversation_id:
252
  conversation_id = await ConversationManager.create_conversation(user_id, "fire-safety")
253
 
254
+ # Add user message to database
255
  await ConversationManager.add_message(conversation_id, "user", request.question)
256
 
257
+ # Get LightRAG instance for fire safety
258
+ rag = await lightrag_manager.get_rag_instance("fire-safety")
 
259
 
260
+ # Query with conversation memory
261
+ response = await lightrag_manager.query_with_memory(
262
+ rag=rag,
263
+ question=request.question,
264
+ conversation_id=conversation_id,
265
+ mode=request.mode or "hybrid",
266
+ max_memory_turns=10
267
+ )
268
 
269
+ # Add assistant response to database
270
+ await ConversationManager.add_message(
271
+ conversation_id,
272
+ "assistant",
273
+ response,
274
+ {"mode": request.mode, "lightrag_used": True}
275
+ )
276
 
277
+ return QuestionResponse(
278
+ answer=response,
279
+ mode=request.mode,
280
+ status="success",
281
+ conversation_id=conversation_id
282
+ )
283
 
 
284
  except Exception as e:
285
+ logger.error(f"Fire safety chat error: {e}")
286
  raise HTTPException(status_code=500, detail=f"Error: {str(e)}")
287
 
288
+ # General Chat - with enhanced memory
289
  @app.post("/chat/general", response_model=QuestionResponse)
290
  async def chat_general(request: QuestionRequest, current_user: dict = Depends(get_current_user)):
291
  if not cloudflare_worker:
 
302
  # Add user message
303
  await ConversationManager.add_message(conversation_id, "user", request.question)
304
 
305
+ # Get conversation history for context
306
+ conversation_history = await ConversationManager.get_conversation_messages(conversation_id, user_id)
307
+
308
+ # Build context from recent messages
309
+ context = ""
310
+ recent_messages = conversation_history[-20:] if len(conversation_history) > 20 else conversation_history
311
+
312
+ if len(recent_messages) > 2:
313
+ context = "\n\nRecent conversation:\n"
314
+ for msg in recent_messages[:-1]:
315
+ role = msg['role']
316
+ content = msg['content'][:150] + "..." if len(msg['content']) > 150 else msg['content']
317
+ context += f"{role.title()}: {content}\n"
318
+
319
+ # Enhanced system prompt with memory
320
+ system_prompt = f"""You are a helpful general AI assistant. You have access to the conversation history and should provide contextually aware responses.
321
+
322
+ {context}
323
+
324
+ Provide accurate, helpful, and engaging responses that take into account the conversation context."""
325
 
326
  response = await cloudflare_worker.query(request.question, system_prompt)
327
 
328
  # Add assistant response
329
+ await ConversationManager.add_message(
330
+ conversation_id,
331
+ "assistant",
332
+ response,
333
+ {"mode": request.mode, "context_used": bool(context)}
334
+ )
335
+
336
+ return QuestionResponse(
337
+ answer=response,
338
+ mode=request.mode,
339
+ status="success",
340
+ conversation_id=conversation_id
341
+ )
342
 
 
343
  except Exception as e:
344
+ logger.error(f"General chat error: {e}")
345
  raise HTTPException(status_code=500, detail=f"Error: {str(e)}")
346
 
347
+ # Custom AI Chat - with LightRAG
348
  @app.post("/chat/custom/{ai_id}", response_model=QuestionResponse)
349
  async def chat_custom_ai(
350
  ai_id: str,
351
  request: QuestionRequest,
352
  current_user: dict = Depends(get_current_user)
353
  ):
354
+ if not lightrag_manager:
355
+ raise HTTPException(status_code=503, detail="LightRAG system not initialized")
356
 
357
  user_id = current_user["id"]
358
 
 
370
  # Add user message
371
  await ConversationManager.add_message(conversation_id, "user", request.question)
372
 
373
+ # Get LightRAG instance for this custom AI
374
+ rag = await lightrag_manager.get_rag_instance("custom", user_id, ai_id)
375
 
376
+ # Query with conversation memory
377
+ response = await lightrag_manager.query_with_memory(
378
+ rag=rag,
379
+ question=request.question,
380
+ conversation_id=conversation_id,
381
+ mode=request.mode or "hybrid",
382
+ max_memory_turns=10
383
+ )
384
 
385
+ # Add assistant response
386
+ await ConversationManager.add_message(
387
+ conversation_id,
388
+ "assistant",
389
+ response,
390
+ {
391
+ "mode": request.mode,
392
+ "ai_name": ai_info['name'],
393
+ "lightrag_used": True
394
+ }
395
+ )
396
 
397
+ return QuestionResponse(
398
+ answer=response,
399
+ mode=request.mode,
400
+ status="success",
401
+ conversation_id=conversation_id
402
+ )
403
+
404
+ except Exception as e:
405
+ logger.error(f"Custom AI chat error: {e}")
406
+ raise HTTPException(status_code=500, detail=f"Error: {str(e)}")
407
 
408
+ # File upload endpoint
409
+ @app.post("/upload", response_model=List[FileUploadResponse])
410
+ async def upload_files(
411
+ files: List[UploadFile] = File(...),
412
+ current_user: dict = Depends(get_current_user)
413
+ ):
414
+ user_id = current_user["id"]
415
+ user_upload_dir = Path(USER_DATA_DIR) / f"user_{user_id}" / "uploads"
416
+ user_upload_dir.mkdir(parents=True, exist_ok=True)
417
+
418
+ uploaded_files = []
419
+ allowed_extensions = {'.txt', '.md', '.json', '.pdf', '.docx'}
420
+
421
+ for file in files:
422
+ if file.size > MAX_UPLOAD_SIZE:
423
+ raise HTTPException(
424
+ status_code=413,
425
+ detail=f"File {file.filename} too large. Max size: {MAX_UPLOAD_SIZE} bytes"
426
+ )
427
+
428
+ file_extension = Path(file.filename).suffix.lower()
429
+ if file_extension not in allowed_extensions:
430
+ raise HTTPException(
431
+ status_code=400,
432
+ detail=f"File type {file_extension} not allowed. Allowed: {allowed_extensions}"
433
+ )
434
+
435
+ file_path = user_upload_dir / file.filename
436
+ with open(file_path, "wb") as buffer:
437
+ shutil.copyfileobj(file.file, buffer)
438
+
439
+ uploaded_files.append(FileUploadResponse(
440
+ filename=file.filename,
441
+ size=file_path.stat().st_size,
442
+ message="Uploaded successfully"
443
+ ))
444
+
445
+ return uploaded_files
446
 
447
+ # Create custom AI - with LightRAG
448
+ @app.post("/create-custom-ai")
449
+ async def create_custom_ai(
450
+ ai_data: CustomAIRequest,
451
+ current_user: dict = Depends(get_current_user)
452
+ ):
453
+ if not lightrag_manager:
454
+ raise HTTPException(status_code=503, detail="LightRAG system not initialized")
455
+
456
+ user_id = current_user["id"]
457
+ user_upload_dir = Path(USER_DATA_DIR) / f"user_{user_id}" / "uploads"
458
+
459
+ if not user_upload_dir.exists() or not list(user_upload_dir.glob("*")):
460
+ raise HTTPException(status_code=400, detail="No files uploaded. Please upload knowledge files first.")
461
+
462
+ # Read uploaded files
463
+ uploaded_files = list(user_upload_dir.glob("*"))
464
+ knowledge_texts = []
465
+
466
+ for file_path in uploaded_files:
467
+ if file_path.is_file():
468
+ try:
469
+ # Read file content based on extension
470
+ if file_path.suffix.lower() in ['.txt', '.md']:
471
+ with open(file_path, 'r', encoding='utf-8') as f:
472
+ content = f.read()
473
+ knowledge_texts.append(content)
474
+ elif file_path.suffix.lower() == '.json':
475
+ with open(file_path, 'r', encoding='utf-8') as f:
476
+ json_data = json.load(f)
477
+ # Convert JSON to text representation
478
+ content = json.dumps(json_data, indent=2)
479
+ knowledge_texts.append(content)
480
+ # Add more file type handlers as needed
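+ # Hypothetical handlers for the remaining allowed types (.pdf, .docx). A sketch,
+ # assuming the optional `pypdf` and `python-docx` packages are installed, that
+ # would extend the if/elif chain above:
+ #   elif file_path.suffix.lower() == '.pdf':
+ #       from pypdf import PdfReader
+ #       reader = PdfReader(str(file_path))
+ #       content = "\n".join(page.extract_text() or "" for page in reader.pages)
+ #       knowledge_texts.append(content)
+ #   elif file_path.suffix.lower() == '.docx':
+ #       from docx import Document  # python-docx
+ #       doc = Document(str(file_path))
+ #       content = "\n".join(p.text for p in doc.paragraphs)
+ #       knowledge_texts.append(content)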
481
+
482
+ except Exception as e:
483
+ logger.warning(f"Error reading file {file_path}: {e}")
484
+ continue
485
+
486
+ if not knowledge_texts:
487
+ raise HTTPException(status_code=400, detail="No readable content found in uploaded files")
488
+
489
+ # Generate AI ID
490
+ ai_id = str(uuid.uuid4())
491
+
492
+ try:
493
+ # Create LightRAG instance with knowledge
494
+ await lightrag_manager.create_custom_rag(user_id, ai_id, knowledge_texts)
495
+
496
+ # Store in database
497
+ knowledge_files_metadata = [
498
+ {"filename": f.name, "size": f.stat().st_size}
499
+ for f in uploaded_files if f.is_file()
500
+ ]
501
+ db_ai_id = await CustomAIManager.create_custom_ai(
502
+ user_id, ai_data.name, ai_data.description, knowledge_files_metadata
503
+ )
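+ # NOTE: the LightRAG instance above is keyed by the generated `ai_id` (UUID),
+ # while CustomAIManager assigns its own `db_ai_id`; the response below returns
+ # `ai_id`, so the two identifiers should be kept in sync (e.g. by storing the
+ # UUID on the database record) for later lookups by the chat endpoint.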
504
 
505
+ ai_info = {
506
+ "id": ai_id,
507
+ "name": ai_data.name,
508
+ "description": ai_data.description,
509
+ "created_at": datetime.now().isoformat(),
510
+ "files_count": len(uploaded_files),
511
+ "knowledge_chunks": len(knowledge_texts)
512
+ }
513
 
514
+ return {
515
+ "ai_id": ai_id,
516
+ "message": "Custom AI created successfully with LightRAG knowledge base",
517
+ "ai_info": ai_info
518
+ }
519
 
 
520
  except Exception as e:
521
+ logger.error(f"Error creating custom AI: {e}")
522
+ raise HTTPException(status_code=500, detail=f"Failed to create custom AI: {str(e)}")
523
+
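+ # Typical flow (illustrative): POST /upload with the knowledge files, then
+ # POST /create-custom-ai with {"name": "...", "description": "..."}; the
+ # returned ai_id identifies the custom AI when chatting with it.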
524
+ # Get user's custom AIs
525
+ @app.get("/my-ais")
526
+ async def get_user_ais(current_user: dict = Depends(get_current_user)):
527
+ user_id = current_user["id"]
528
+ try:
529
+ ais = await CustomAIManager.get_user_ais(user_id)
530
+ return {"ais": ais, "count": len(ais)}
531
+ except Exception as e:
532
+ logger.error(f"Error getting user AIs: {e}")
533
+ return {"ais": [], "count": 0}
534
 
535
  # Get user conversations
536
  @app.get("/conversations")
 
543
  return {"conversations": cached}
544
 
545
  try:
546
+ # In a real implementation, query your database
 
547
  conversations = []
548
 
549
  # Cache for 15 minutes
550
  await db_manager.cache_set(f"user:{user_id}:conversations", conversations, 900)
551
 
552
  return {"conversations": conversations}
553
  except Exception as e:
554
+ logger.error(f"Error getting conversations: {e}")
555
  return {"conversations": []}
556
 
557
  # Get specific conversation messages
 
563
  messages = await ConversationManager.get_conversation_messages(conversation_id, user_id)
564
  return {"messages": messages}
565
  except Exception as e:
566
+ logger.error(f"Error getting conversation messages: {e}")
567
  return {"messages": []}
568
 
569
  # Delete conversation
 
572
  user_id = current_user["id"]
573
 
574
  try:
575
+ # In a real implementation, update your database
576
+ # query = "UPDATE conversations SET is_active = false WHERE id = $1 AND user_id = $2"
577
+ # await db_manager.execute_query(query, conversation_id, user_id)
578
 
579
  # Invalidate cache
580
  await db_manager.cache_delete(f"user:{user_id}:conversations")
581
 
582
  return {"message": "Conversation deleted successfully"}
583
  except Exception as e:
584
+ logger.error(f"Error deleting conversation: {e}")
585
  raise HTTPException(status_code=500, detail="Failed to delete conversation")
586
 
587
+ # Clear conversation memory (LightRAG specific)
588
+ @app.delete("/conversations/{conversation_id}/memory")
589
+ async def clear_conversation_memory(
590
+ conversation_id: str,
591
+ current_user: dict = Depends(get_current_user)
592
+ ):
593
+ """Clear conversation memory for LightRAG"""
594
+ if lightrag_manager:
595
+ lightrag_manager.clear_conversation_memory(conversation_id)
596
+
597
+ return {"message": "Conversation memory cleared"}
598
+
599
+ # Get conversation memory status
600
+ @app.get("/conversations/{conversation_id}/memory")
601
+ async def get_conversation_memory_status(
602
+ conversation_id: str,
603
+ current_user: dict = Depends(get_current_user)
604
+ ):
605
+ """Get conversation memory status"""
606
+ if not lightrag_manager:
607
+ return {"has_memory": False, "message_count": 0}
608
+
609
+ memory = lightrag_manager.conversation_memory.get(conversation_id, [])
610
+
611
+ return {
612
+ "has_memory": len(memory) > 0,
613
+ "message_count": len(memory),
614
+ "last_updated": memory[-1]["timestamp"] if memory else None
615
+ }
616
+
617
  # Legacy endpoints for backward compatibility
618
  @app.post("/ask", response_model=QuestionResponse)
619
  async def ask_legacy(request: QuestionRequest, current_user: dict = Depends(get_current_user)):
 
623
  @app.get("/modes")
624
  async def get_modes():
625
  return {
626
+ "modes": ["hybrid", "local", "global", "naive"],
627
  "default": "hybrid"
628
  }
629
 
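+ # These map to LightRAG query modes; e.g. a client can send
+ #   {"question": "What are the fire exit requirements?", "mode": "local"}
+ # and "hybrid" is used when no mode is supplied.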
 
633
  "fire_safety": [
634
  "What are the fire exit requirements for a commercial building?",
635
  "How many fire extinguishers are needed in an office space?",
636
+ "What is the maximum travel distance to an exit?",
637
+ "What are the requirements for emergency lighting?",
638
+ "How often should fire safety equipment be inspected?"
639
  ],
640
  "general": [
641
+ "How do I create a presentation?",
642
+ "What is machine learning?",
643
+ "Explain quantum computing",
644
+ "Help me plan a project timeline",
645
+ "What are the best practices for remote work?"
646
  ]
647
  }
648
 
649
+ # System information endpoint
650
+ @app.get("/system/info")
651
+ async def get_system_info():
652
+ return {
653
+ "service": "YourAI",
654
+ "version": "2.0.0",
655
+ "features": {
656
+ "lightrag_integration": True,
657
+ "conversation_memory": True,
658
+ "custom_ai_support": True,
659
+ "file_upload": True,
660
+ "multi_model_support": True
661
+ },
662
+ "models": {
663
+ "llm": LLM_MODEL,
664
+ "embedding": EMBEDDING_MODEL
665
+ },
666
+ "storage": {
667
+ "graph": "NetworkXStorage",
668
+ "vector": "NanoVectorDBStorage",
669
+ "conversation_memory": "In-Memory"
670
  }
671
+ }
672
 
673
+ # System status endpoint
674
+ @app.get("/system/status")
675
+ async def get_system_status():
676
+ status = {
677
  "status": "healthy",
678
+ "components": {
679
+ "lightrag": lightrag_manager is not None,
680
+ "cloudflare_worker": cloudflare_worker is not None,
681
+ "database": True, # Implement actual DB health check
682
+ "cache": True # Implement actual cache health check
683
+ },
684
+ "memory": {
685
+ "active_conversations": len(lightrag_manager.conversation_memory) if lightrag_manager else 0,
686
+ "rag_instances": len(lightrag_manager.rag_instances) if lightrag_manager else 0
687
+ }
688
  }
689
 
690
+ # Overall health check
691
+ all_healthy = all(status["components"].values())
692
+ status["status"] = "healthy" if all_healthy else "unhealthy"
693
 
694
+ return status
695
+
696
+ # Test endpoint for development
697
+ @app.get("/test")
698
+ async def test_endpoint():
699
+ return {
700
+ "message": "Test endpoint working",
701
+ "timestamp": datetime.now().isoformat(),
702
+ "environment": {
703
+ "cloudflare_configured": bool(CLOUDFLARE_API_KEY),
704
+ "database_configured": bool(DATABASE_URL),
705
+ "redis_configured": bool(REDIS_URL)
706
+ }
707
+ }
708
 
709
  if __name__ == "__main__":
710
  import uvicorn