github-actions[bot] committed
Commit b9d49bb · 1 Parent(s): 28d211c

Auto-sync from demo at Mon Sep 29 07:30:38 UTC 2025

Files changed (2)
  1. graphgen/generate.py +1 -1
  2. graphgen/utils/log.py +41 -18
graphgen/generate.py CHANGED
@@ -65,7 +65,7 @@ def main():
         "GraphGen with unique ID %s logging to %s",
         unique_id,
         os.path.join(
-            working_dir, "logs", f"graphgen_{output_data_type}_{unique_id}.log"
+            working_dir, "logs", f"{unique_id}_graphgen_{output_data_type}.log"
         ),
     )
 
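The generate.py change only renames the per-run log file so the unique ID comes first. A minimal sketch of the resulting path, using the names from the diff with invented values (working_dir, unique_id, and output_data_type are hypothetical here):

import os

working_dir = "cache"              # hypothetical working directory
unique_id = "20250929_073038"      # hypothetical run ID
output_data_type = "atomic"        # hypothetical output data type

# Old name: cache/logs/graphgen_atomic_20250929_073038.log
# New name: cache/logs/20250929_073038_graphgen_atomic.log
log_path = os.path.join(
    working_dir, "logs", f"{unique_id}_graphgen_{output_data_type}.log"
)
print(log_path)
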
graphgen/utils/log.py CHANGED
@@ -1,32 +1,55 @@
 import logging
+from logging.handlers import RotatingFileHandler
+
+from rich.logging import RichHandler
 
 logger = logging.getLogger("graphgen")
 
-def set_logger(log_file: str, log_level: int = logging.INFO, if_stream: bool = True):
-    logger.setLevel(log_level)
-
-    formatter = logging.Formatter(
-        "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
-    )
-
-    file_handler = logging.FileHandler(log_file, mode='w')
-    file_handler.setLevel(log_level)
-    file_handler.setFormatter(formatter)
-
-    stream_handler = None
-
-    if if_stream:
-        stream_handler = logging.StreamHandler()
-        stream_handler.setLevel(log_level)
-        stream_handler.setFormatter(formatter)
-
-    if not logger.handlers:
-        logger.addHandler(file_handler)
-        if if_stream and stream_handler:
-            logger.addHandler(stream_handler)
+
+def set_logger(
+    log_file: str,
+    log_level: int = logging.INFO,
+    *,
+    if_stream: bool = True,
+    max_bytes: int = 50 * 1024 * 1024,  # 50 MB
+    backup_count: int = 5,
+    force: bool = False,
+):
+
+    if logger.hasHandlers() and not force:
+        return
+
+    if force:
+        logger.handlers.clear()
+
+    logger.setLevel(log_level)
+    logger.propagate = False
+
+    if logger.handlers:
+        logger.handlers.clear()
+
+    if if_stream:
+        console = RichHandler(level=log_level, show_path=False, rich_tracebacks=True)
+        console.setFormatter(logging.Formatter("%(message)s"))
+        logger.addHandler(console)
+
+    file_handler = RotatingFileHandler(
+        log_file,
+        maxBytes=max_bytes,
+        backupCount=backup_count,
+        encoding="utf-8",
+    )
+    file_handler.setLevel(log_level)
+    file_handler.setFormatter(
+        logging.Formatter(
+            "[%(asctime)s] %(levelname)s [%(name)s:%(filename)s:%(lineno)d] %(message)s",
+            datefmt="%y-%m-%d %H:%M:%S",
+        )
+    )
+    logger.addHandler(file_handler)
 
 
 def parse_log(log_file: str):
-    with open(log_file, "r", encoding='utf-8') as f:
+    with open(log_file, "r", encoding="utf-8") as f:
         lines = f.readlines()
         return lines
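
For context, a minimal usage sketch of the reworked set_logger, based only on the signature visible in the diff; the log path and option values below are invented for illustration:

import logging

from graphgen.utils.log import parse_log, set_logger

# Keyword-only options mirror the new signature: console output via Rich,
# size-based file rotation, and a force flag that rebuilds handlers on repeat calls.
set_logger(
    "cache/logs/example_graphgen.log",  # hypothetical log path
    log_level=logging.DEBUG,
    if_stream=True,
    max_bytes=10 * 1024 * 1024,         # rotate after ~10 MB instead of the 50 MB default
    backup_count=3,
    force=True,                         # clear any previously attached handlers
)

logging.getLogger("graphgen").info("Logger configured with rotation and Rich console output")

# parse_log simply returns the raw lines of a log file for later inspection.
lines = parse_log("cache/logs/example_graphgen.log")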