Commit · a2b7a6b
Parent(s): 625dc70

Report generation only includes the last two releases

.github/scripts/process_report.py
CHANGED
@@ -18,22 +18,23 @@ def format_datetime(dt_str: str) -> str:
     return dt_str.replace("T", " ").split("+")[0]
 
 
-def read_json_line_by_line(file_path):
+def read_json_line_by_line(file_path, commit_hash=None):
     """
     Read a JSON file line by line, parsing each line as a separate JSON object.
+    Optionally filter by commit_hash if provided.
 
     :param file_path: Path to the JSON file
+    :param commit_hash: Optional commit hash to filter data
     :return: List of parsed JSON objects
-
-    This function is useful for reading large JSON files that contain one JSON object
-    per line. It handles JSON parsing errors gracefully, skipping invalid lines.
     """
     data = []
     with open(file_path, "r") as f:
         for line in f:
             try:
                 item = json.loads(line.strip())
-                data.append(item)
+                # Filter by commit_hash if provided
+                if commit_hash is None or item.get("commit_hash") == commit_hash:
+                    data.append(item)
             except json.JSONDecodeError:
                 print(f"Skipping invalid JSON in {file_path}: {line}")
     return data
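For context, a minimal, self-contained sketch of how the updated reader behaves. The function body is copied from the hunk above so the sketch runs standalone; the sample records, field values, and temp-file handling are invented for illustration and are not part of the commit.

import json
import tempfile

def read_json_line_by_line(file_path, commit_hash=None):
    """Copy of the updated reader above, included so the sketch runs standalone."""
    data = []
    with open(file_path, "r") as f:
        for line in f:
            try:
                item = json.loads(line.strip())
                # Keep every valid record when no hash is given, otherwise only matches.
                if commit_hash is None or item.get("commit_hash") == commit_hash:
                    data.append(item)
            except json.JSONDecodeError:
                print(f"Skipping invalid JSON in {file_path}: {line}")
    return data

# Made-up JSONL records; the keys mirror those used later in the script.
sample = "\n".join([
    '{"model": "m1", "device": "iPhone 15", "os": "iOS 17", "commit_hash": "a2b7a6b"}',
    '{"model": "m1", "device": "iPhone 15", "os": "iOS 17", "commit_hash": "625dc70"}',
    "not-json",  # exercises the JSONDecodeError branch
])
with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as tmp:
    tmp.write(sample)

print(len(read_json_line_by_line(tmp.name)))                         # 2 (no filter)
print(len(read_json_line_by_line(tmp.name, commit_hash="a2b7a6b")))  # 1 (filtered)

Because commit_hash defaults to None, callers that do not pass a hash keep the old behaviour of returning every valid line.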
@@ -216,9 +217,22 @@ def analyze_support_changes(prev_csv, curr_csv):
 
 
 def generate_report():
-    # Load data
-    prev_perf_data = read_json_line_by_line("dashboard_data/performance_data.json")
-    curr_perf_data = read_json_line_by_line("report_data/performance_data.json")
+    # Load version data first to get commit hashes
+    with open("report_data/version.json", "r") as f:
+        version_data = json.load(f)
+
+    # Get the last two commit hashes from releases array
+    releases = version_data.get("releases", [])
+    if len(releases) >= 2:
+        curr_commit_hash = releases[-1]  # latest commit
+        prev_commit_hash = releases[-2]  # previous commit
+    else:
+        curr_commit_hash = releases[-1] if releases else ""
+        prev_commit_hash = ""
+
+    # Load and filter performance data by commit hash
+    prev_perf_data = read_json_line_by_line("dashboard_data/performance_data.json", commit_hash=prev_commit_hash)
+    curr_perf_data = read_json_line_by_line("report_data/performance_data.json", commit_hash=curr_commit_hash)
 
     prev_dict = {(d["model"], d["device"], d["os"]): d for d in prev_perf_data}
     curr_dict = {(d["model"], d["device"], d["os"]): d for d in curr_perf_data}
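The release-selection logic in generate_report() can also be read on its own. Below is a minimal sketch, assuming the "releases" array in report_data/version.json is ordered oldest-to-newest, which is what the releases[-1]/releases[-2] indexing implies; the file contents are invented, reusing the two commit hashes from this page's header.

import json

# Assumed shape of report_data/version.json -- only the "releases" key matters here.
version_data = json.loads('{"releases": ["625dc70", "a2b7a6b"]}')

releases = version_data.get("releases", [])
if len(releases) >= 2:
    curr_commit_hash = releases[-1]  # latest release
    prev_commit_hash = releases[-2]  # previous release
else:
    curr_commit_hash = releases[-1] if releases else ""
    prev_commit_hash = ""

print(curr_commit_hash, prev_commit_hash)  # a2b7a6b 625dc70

Note that with fewer than two releases, prev_commit_hash is an empty string; since no record presumably carries an empty commit_hash, the filtered read for the previous release would yield an empty list and the report would have nothing to compare against.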