#!/usr/bin/env python3
"""
Repository tree utilities: build nested structures from local directories or flat path lists.
"""
from __future__ import annotations

from pathlib import Path
from typing import Dict, Any, List


def _tree_node(name: str, node_type: str) -> Dict[str, Any]:
    """Create a tree node: directories carry a "children" list, files do not."""
    assert node_type in ("dir", "file")
    if node_type == "dir":
        return {"name": name, "type": node_type, "children": []}
    return {"name": name, "type": node_type}


def _insert_path_into_tree(root: Dict[str, Any], parts: List[str]) -> None:
    """Insert a path, split into its parts, into the tree rooted at `root`."""
    node = root
    for i, part in enumerate(parts):
        is_last = i == len(parts) - 1
        if not is_last:
            # find or create directory child
            found = None
            for ch in node["children"]:
                if ch["type"] == "dir" and ch["name"] == part:
                    found = ch
                    break
            if found is None:
                found = _tree_node(part, "dir")
                node["children"].append(found)
            node = found
        else:
            # file leaf
            node["children"].append(_tree_node(part, "file"))


def build_tree_from_local_dir(base: Path, only_md: bool) -> Dict[str, Any]:
    """Recursively walk `base` and build a nested tree of its files, optionally keeping only Markdown files."""
    root = _tree_node(base.name, "dir")
    for p in base.rglob("*"):
        if not p.is_file():
            continue
        if only_md and not p.name.lower().endswith(".md"):
            continue
        rel = p.relative_to(base)
        parts = list(rel.parts)
        if parts:
            _insert_path_into_tree(root, parts)
    return root


def build_tree_from_paths(paths: List[str], root_name: str = "repo") -> Dict[str, Any]:
    """Build a nested tree from a flat list of path strings."""
    root = _tree_node(root_name, "dir")
    for path in paths:
        parts = [p for p in Path(path).parts if p]
        if parts:
            _insert_path_into_tree(root, parts)
    return root


def filter_paths_by_directories(paths: List[str], dir_names: List[str]) -> List[str]:
    """
    Keep only those paths that are under any of the given directory names.
    A match occurs when a path starts with "<dir>/" or contains "/<dir>/".
    """
    if not dir_names:
        return paths
    name_set = set(dir_names)
    out: List[str] = []
    for p in paths:
        for dn in name_set:
            if p.startswith(f"{dn}/") or f"/{dn}/" in p:
                out.append(p)
                break
    return out
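

if __name__ == "__main__":
    # Minimal usage sketch, assuming a flat path listing like one scraped from a
    # repository. The example paths below are hypothetical and only illustrate the
    # public helpers; they are not part of this module.
    import json

    example_paths = [
        "README.md",
        "docs/intro.md",
        "docs/api/reference.md",
        "src/main.py",
    ]
    docs_only = filter_paths_by_directories(example_paths, ["docs"])
    tree = build_tree_from_paths(docs_only, root_name="example-repo")
    print(json.dumps(tree, indent=2))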