Commit 857c602 (parent: c554ef1), committed by MRiabov

Add missing description fetching from GitHub API

data_collection_utils/awesome_final_repos.py CHANGED
@@ -36,9 +36,12 @@ import re
 import sys
 import yaml
 from dotenv import load_dotenv
-from github_api_utils import fetch_repo_readme_markdown
 import pandas as pd
-from github_api_utils import github_headers
+from github_api_utils import (
+    github_headers,
+    fetch_repo_description,
+    fetch_repo_readme_markdown,
+)
 
 load_dotenv()
 
@@ -76,7 +79,10 @@ async def fetch_readme_with_cache(
 
 
 def canonical_repo_url(url: str) -> Optional[str]:
-    p = urlparse(url)
+    try:
+        p = urlparse(url)
+    except ValueError:
+        return None
     if p.netloc != "github.com" or p.scheme not in {"http", "https"}:
         return None
     parts = [part for part in p.path.split("/") if part]
@@ -270,6 +276,30 @@ async def enrich_with_stars(
     await asyncio.gather(*(one(r) for r in rows))
 
 
+def _desc_is_missing(desc: Optional[str]) -> bool:
+    if desc is None:
+        return True
+    s = desc.strip()
+    return s == "" or s == "-" or s == "—"
+
+
+async def enrich_missing_descriptions(
+    session: aiohttp.ClientSession, rows: List[Dict[str, str]], concurrency: int
+) -> None:
+    sem = asyncio.Semaphore(concurrency if concurrency and concurrency > 0 else 10)
+
+    async def one(row: Dict[str, str]):
+        if not _desc_is_missing(row.get("description")):
+            return
+        async with sem:
+            owner, repo = parse_owner_repo(row["link"])  # link is canonical
+            desc = await fetch_repo_description(session, owner, repo)
+            if desc:
+                row["description"] = desc
+
+    await asyncio.gather(*(one(r) for r in rows))
+
+
 def main() -> None:
     cfg_dir = Path(__file__).resolve().parent
     cfg_path = cfg_dir / "awesome_scrap_config.yaml"
@@ -331,6 +361,10 @@ def main() -> None:
             f"Fetching stargazers_count for {len(rows)} repos (concurrency={args.workers})..."
         )
         await enrich_with_stars(session, rows, args.workers)
+        # Fill missing descriptions by querying repo short description
+        if rows:
+            print("Filling missing descriptions from GitHub repo metadata...")
+            await enrich_missing_descriptions(session, rows, args.workers)
     out_parquet = output_dir / "awesome-repos.parquet"
     output_dir.mkdir(parents=True, exist_ok=True)
     # include stars column if present
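
For context, here is a minimal driver sketch for the new helper. The `rows` shape with `link` and `description` keys follows the diff above; the import path and the two repository URLs are hypothetical examples, and `parse_owner_repo` is assumed to be defined elsewhere in `awesome_final_repos.py`:

```python
# Sketch only: assumes it runs next to awesome_final_repos.py so that
# enrich_missing_descriptions and its dependencies import cleanly.
import asyncio

import aiohttp

from awesome_final_repos import enrich_missing_descriptions


async def demo() -> None:
    rows = [
        # "-" is one of the placeholder values _desc_is_missing treats as empty
        {"link": "https://github.com/psf/requests", "description": "-"},
        {"link": "https://github.com/pallets/flask", "description": "web framework"},
    ]
    async with aiohttp.ClientSession() as session:
        # Only the first row should be refetched; the second keeps its text.
        await enrich_missing_descriptions(session, rows, concurrency=10)
    for row in rows:
        print(row["link"], "->", row.get("description"))


asyncio.run(demo())
```

Note that `enrich_missing_descriptions` mutates `rows` in place and leaves the placeholder untouched when the API returns nothing, mirroring the gather-and-continue style of `enrich_with_stars`.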
data_collection_utils/github_api_utils.py CHANGED
@@ -224,4 +224,79 @@ async def fetch_repo_readme_markdown(
                     return await d.text()
         except Exception:
             continue
+    # Fallback: list the repo tree at depth=0 (root only) and select any file starting with README*
+    try:
+        # Get default branch to address the tree head
+        repo_url = f"{GITHUB_API}/repos/{owner}/{repo}"
+        default_branch = "main"
+        async with session.get(repo_url, headers=headers) as rinfo:
+            if rinfo.status == 200:
+                info = await rinfo.json()
+                if isinstance(info, dict) and info.get("default_branch"):
+                    default_branch = info["default_branch"]
+
+        # Depth=0 tree (root only). Omitting the recursive parameter implies non-recursive.
+        tree_url = f"{GITHUB_API}/repos/{owner}/{repo}/git/trees/{quote_plus(default_branch)}"
+        async with session.get(tree_url, headers=headers) as rtree:
+            if rtree.status != 200:
+                return None
+            tree = await rtree.json()
+        if not isinstance(tree, dict) or "tree" not in tree:
+            return None
+
+        entries = tree["tree"]
+        # Find candidates that start with README (case-insensitive) and are files (blobs)
+        candidates = []
+        for e in entries:
+            if e.get("type") != "blob":
+                continue
+            path = e.get("path")
+            if not path:
+                continue
+            name_lower = path.lower()
+            if name_lower.startswith("readme"):
+                # Priority: .md < .rst < .org < others; shorter names first
+                prio_map = {".md": 0, ".rst": 1, ".org": 2}
+                ext = ""
+                if "." in path:
+                    ext = path[path.rfind(".") :].lower()
+                prio = (prio_map.get(ext, 3), len(path))
+                candidates.append((prio, path))
+
+        if not candidates:
+            return None
+        candidates.sort()
+        chosen_path = candidates[0][1]
+
+        # Fetch the chosen README variant via contents API to get a direct download URL
+        contents_url = f"{GITHUB_API}/repos/{owner}/{repo}/contents/{quote_plus(chosen_path)}"
+        async with session.get(contents_url, headers=headers) as rc:
+            if rc.status != 200:
+                return None
+            cdata = await rc.json()
+        if isinstance(cdata, dict) and "download_url" in cdata:
+            download_url = cdata["download_url"]
+            async with session.get(download_url, headers=headers) as rd:
+                if rd.status == 200:
+                    return await rd.text()
+    except Exception:
+        return None
+
+    return None
+
+
+async def fetch_repo_description(
+    session: aiohttp.ClientSession, owner: str, repo: str
+) -> Optional[str]:
+    url = f"https://api.github.com/repos/{owner}/{repo}"
+    try:
+        async with session.get(url, headers=github_headers()) as resp:
+            if resp.status == 200:
+                data = await resp.json()
+                if isinstance(data, dict) and "description" in data:
+                    desc = data["description"]
+                    if isinstance(desc, str):
+                        return desc
+    except Exception:
+        return None
     return None
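
As a sanity check on the new `fetch_repo_description`, it is a thin wrapper over `GET /repos/{owner}/{repo}`. Here is a standalone sketch of that call; the target repository is an arbitrary example, and unauthenticated requests are limited to 60 per hour by GitHub, which is why the helper routes its headers through `github_headers()`:

```python
import asyncio

import aiohttp


async def main() -> None:
    async with aiohttp.ClientSession() as session:
        # Same endpoint the new helper hits: GET /repos/{owner}/{repo}.
        # An Authorization header (as built by github_headers()) would
        # raise the rate limit; it is omitted here for brevity.
        async with session.get(
            "https://api.github.com/repos/psf/requests",
            headers={"Accept": "application/vnd.github+json"},
        ) as resp:
            if resp.status == 200:
                data = await resp.json()
                print(data.get("description"))  # short description, or None


asyncio.run(main())
```

On the README fallback in the same hunk: candidates sort by the tuple `(extension priority, name length)`, so `README.md` beats `README.rst`, which beats `README.org`, and shorter names break ties among equal extensions; a plain `candidates.sort()` on those tuples yields exactly that order.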