"""Codebase analysis agent to detect framework, dependencies, and structure."""
from __future__ import annotations
import json
import re
from pathlib import Path
from typing import Any, Dict, List, Optional
class CodebaseAnalyzer:
"""Analyzes codebase to extract framework, dependencies, and deployment info."""
def analyze_folder(self, folder_path: str) -> Dict[str, Any]:
"""Analyze a local folder."""
path = Path(folder_path)
        if not path.is_dir():
return {"error": f"Folder not found: {folder_path}"}
analysis = {
"framework": None,
"platform": None,
"dependencies": [],
"package_manager": None,
"has_docker": False,
"has_docker_compose": False,
"has_k8s": False,
"config_files": [],
"readme_path": None,
"code_summary": "",
"detected_files": []
}
# Check for package files
package_files = {
"package.json": "npm",
"requirements.txt": "pip",
"Pipfile": "pipenv",
"poetry.lock": "poetry",
"go.mod": "go",
"Cargo.toml": "rust",
"pom.xml": "maven",
"build.gradle": "gradle",
}
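        # Later matches overwrite package_manager, so a polyglot repo ends up
        # with the last matching file in this table; every file found is still
        # recorded in detected_files.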
for file_name, manager in package_files.items():
file_path = path / file_name
if file_path.exists():
analysis["package_manager"] = manager
analysis["config_files"].append(file_name)
analysis["detected_files"].append(file_name)
# Extract dependencies
if file_name == "package.json":
deps = self._parse_package_json(file_path)
analysis["dependencies"] = deps.get("dependencies", [])
analysis["framework"] = self._detect_framework_from_package_json(deps)
elif file_name == "requirements.txt":
analysis["dependencies"] = self._parse_requirements_txt(file_path)
analysis["framework"] = self._detect_framework_from_requirements(analysis["dependencies"])
# Check for deployment configs
        dockerfile = path / "Dockerfile"
        compose_files = [path / "docker-compose.yml", path / "docker-compose.yaml", path / "compose.yaml"]
        k8s_dirs = [path / "k8s", path / "kubernetes"]
if dockerfile.exists():
analysis["has_docker"] = True
analysis["detected_files"].append("Dockerfile")
        for compose_file in compose_files:
            if compose_file.exists():
                analysis["has_docker_compose"] = True
                analysis["detected_files"].append(compose_file.name)
                break
        for k8s_dir in k8s_dirs:
            if k8s_dir.exists():
                analysis["has_k8s"] = True
                analysis["detected_files"].append(f"{k8s_dir.name}/")
                break
# Find README
for readme_name in ["README.md", "readme.md", "README.txt"]:
readme_path = path / readme_name
if readme_path.exists():
analysis["readme_path"] = str(readme_path)
break
# Detect platform from config files
vercel_json = path / "vercel.json"
netlify_toml = path / "netlify.toml"
if vercel_json.exists():
analysis["platform"] = "vercel"
analysis["detected_files"].append("vercel.json")
elif netlify_toml.exists():
analysis["platform"] = "netlify"
analysis["detected_files"].append("netlify.toml")
# Generate code summary
analysis["code_summary"] = self._generate_code_summary(analysis)
return analysis
def analyze_github_repo(self, repo_url: str) -> Dict[str, Any]:
"""Analyze a GitHub repository (placeholder - would use GitHub API)."""
# Extract owner/repo from URL
match = re.search(r"github\.com[:/]([\w\-]+)/([\w\-\.]+)", repo_url)
if not match:
return {"error": "Invalid GitHub URL"}
        owner, repo = match.groups()
        repo = repo.removesuffix(".git")  # tolerate clone-style URLs
# In production, would use GitHub API to fetch files
# For now, return structure
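        # Sketch of the eventual integration (hypothetical, untested): list the
        # repository root via the GitHub REST API,
        #     GET https://api.github.com/repos/{owner}/{repo}/contents
        # then reuse the same package-file detection as analyze_folder.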
return {
"repo": f"{owner}/{repo}",
"url": repo_url,
"framework": None, # Would be detected from API
"platform": None,
"dependencies": [],
"message": "GitHub repo analysis requires API integration"
}
def _parse_package_json(self, file_path: Path) -> Dict[str, Any]:
"""Parse package.json file."""
        try:
            with open(file_path, "r", encoding="utf-8") as f:
                return json.load(f)
        except (OSError, ValueError):
            # Unreadable or malformed package.json is treated as empty
            return {}
def _parse_requirements_txt(self, file_path: Path) -> List[str]:
"""Parse requirements.txt file."""
deps = []
        try:
            with open(file_path, "r", encoding="utf-8") as f:
                for line in f:
                    line = line.strip()
                    # Skip blanks, comments, and pip options such as "-r base.txt"
                    if not line or line.startswith(("#", "-")):
                        continue
                    # Strip version specifiers, extras, and environment markers,
                    # e.g. "uvicorn[standard]>=0.23; python_version >= '3.9'"
                    deps.append(re.split(r"[\[<>=~!;\s]", line, maxsplit=1)[0])
        except OSError:
            pass
        return deps
def _detect_framework_from_package_json(self, package_json: Dict) -> Optional[str]:
"""Detect framework from package.json dependencies."""
deps = {**package_json.get("dependencies", {}), **package_json.get("devDependencies", {})}
deps_lower = {k.lower(): v for k, v in deps.items()}
        # Framework detection; order matters, meta-frameworks come before the
        # libraries they bundle (e.g. Next.js before React)
if "next" in deps_lower:
return "next.js"
elif "react" in deps_lower and "react-dom" in deps_lower:
return "react"
elif "vue" in deps_lower:
return "vue"
elif "angular" in deps_lower or "@angular/core" in deps_lower:
return "angular"
elif "svelte" in deps_lower:
return "svelte"
elif "express" in deps_lower:
return "express"
elif "@nestjs/core" in deps_lower:
return "nestjs"
elif "remix" in deps_lower:
return "remix"
return None
def _detect_framework_from_requirements(self, deps: List[str]) -> Optional[str]:
"""Detect framework from Python requirements."""
deps_lower = [d.lower() for d in deps]
if "django" in deps_lower:
return "django"
elif "fastapi" in deps_lower:
return "fastapi"
elif "flask" in deps_lower:
return "flask"
elif "starlette" in deps_lower:
return "starlette"
return None
def _generate_code_summary(self, analysis: Dict[str, Any]) -> str:
"""Generate code summary from analysis."""
parts = []
if analysis["framework"]:
parts.append(f"Framework: {analysis['framework']}")
if analysis["package_manager"]:
parts.append(f"Package manager: {analysis['package_manager']}")
if analysis["dependencies"]:
parts.append(f"Dependencies: {len(analysis['dependencies'])} packages")
if analysis["has_docker"]:
parts.append("Docker configuration detected")
if analysis["has_k8s"]:
parts.append("Kubernetes configuration detected")
return ". ".join(parts) if parts else "Codebase analysis complete"
def update_readme(self, readme_path: str, deployment_info: Dict[str, Any]) -> str:
"""Update README with deployment information."""
try:
with open(readme_path, 'r') as f:
content = f.read()
except Exception:
content = "# Deployment\n\n"
        deployment_section = f"""
### Deployment Status

- **Platform**: {deployment_info.get('platform') or 'Not configured'}
- **Framework**: {deployment_info.get('framework') or 'Unknown'}
- **Status**: {deployment_info.get('status') or 'Ready for deployment'}

### Quick Deploy

{deployment_info.get('deployment_instructions') or 'Configure deployment in the Deployment Readiness Copilot'}

---
*Last updated by Deployment Readiness Copilot*
"""
        if "## Deployment" in content:
            # Replace the existing section, up to the next H2 heading or EOF.
            # The lookahead requires "## " so the "###" subsections above are
            # not mistaken for the start of the next section.
            pattern = r"## Deployment.*?(?=\n## |\Z)"
            # A callable replacement keeps re.sub from interpreting backslashes
            # in the section text as group references.
            content = re.sub(pattern, lambda _: f"## Deployment\n{deployment_section}", content, flags=re.DOTALL)
        else:
            content += f"\n\n## Deployment\n{deployment_section}"
try:
with open(readme_path, 'w') as f:
f.write(content)
return "README updated successfully"
except Exception as e:
return f"Failed to update README: {str(e)}"
def generate_architecture_diagram(self, analysis: Dict[str, Any]) -> str:
"""Generate Mermaid.js architecture diagram from analysis."""
framework = analysis.get("framework", "Unknown")
platform = analysis.get("platform", "Unknown")
dependencies = analysis.get("dependencies", [])
has_docker = analysis.get("has_docker")
mermaid = "graph TD\n"
mermaid += f" User((User)) --> LB[Load Balancer / {platform}]\n"
mermaid += f" LB --> App[{framework} App]\n"
if has_docker:
mermaid += " subgraph Container [Docker Container]\n"
mermaid += " App\n"
mermaid += " end\n"
if "postgresql" in dependencies or "psycopg2" in dependencies:
mermaid += " App --> DB[(PostgreSQL)]\n"
elif "mysql" in dependencies:
mermaid += " App --> DB[(MySQL)]\n"
elif "mongo" in dependencies or "pymongo" in dependencies:
mermaid += " App --> DB[(MongoDB)]\n"
elif "redis" in dependencies:
mermaid += " App --> Cache[(Redis)]\n"
return mermaid
def identify_fixes(self, analysis: Dict[str, Any]) -> List[Dict[str, str]]:
"""Identify potential fixes for the codebase."""
fixes = []
        # Python sources present but no dependency manifest of any kind
        if analysis.get("has_python_sources") and not analysis.get("package_manager"):
            fixes.append({
                "id": "missing_requirements",
                "title": "Missing requirements.txt",
                "description": "Python sources detected but no requirements.txt (or other dependency manifest) found.",
                "fix_action": "create_file",
                "file": "requirements.txt",
                "content": "# List your project's dependencies here, one per line\n"
            })
        # Check for missing Dockerfile; pick a template matching the detected stack
        if not analysis.get("has_docker"):
            if analysis.get("package_manager") == "npm":
                dockerfile_content = ("FROM node:20-slim\nWORKDIR /app\nCOPY package*.json ./\n"
                                      "RUN npm ci\nCOPY . .\nCMD [\"npm\", \"start\"]\n")
            else:
                dockerfile_content = ("FROM python:3.9-slim\nWORKDIR /app\nCOPY . .\n"
                                      "RUN pip install -r requirements.txt\nCMD [\"python\", \"app.py\"]\n")
            fixes.append({
                "id": "missing_dockerfile",
                "title": "Missing Dockerfile",
                "description": "Containerization is recommended for deployment.",
                "fix_action": "create_file",
                "file": "Dockerfile",
                "content": dockerfile_content
            })
# Check for missing README
if not analysis.get("readme_path"):
fixes.append({
"id": "missing_readme",
"title": "Missing README",
"description": "Documentation is essential for collaboration.",
"fix_action": "create_file",
"file": "README.md",
"content": f"# {analysis.get('framework', 'Project')}\n\nGenerated by Deploy Ready Copilot."
})
return fixes
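

if __name__ == "__main__":
    # Minimal smoke test; the "." path is a placeholder for a real project
    # folder. Prints the analysis, the Mermaid diagram, and suggested fixes.
    import pprint

    analyzer = CodebaseAnalyzer()
    result = analyzer.analyze_folder(".")
    pprint.pprint(result)
    print(analyzer.generate_architecture_diagram(result))
    pprint.pprint(analyzer.identify_fixes(result))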